diff --git a/lib/migrations/versions/7a71dbf71c64_create_db.py b/lib/migrations/versions/7a71dbf71c64_create_db.py index 23e0a655b..471b718dd 100644 --- a/lib/migrations/versions/7a71dbf71c64_create_db.py +++ b/lib/migrations/versions/7a71dbf71c64_create_db.py @@ -16,9 +16,14 @@ depends_on = None -def upgrade() -> None: - pass +def upgrade(): + op.create_table('companies', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=True), + sa.Column('founding_year', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) -def downgrade() -> None: - pass +def downgrade(): + op.drop_table('companies') \ No newline at end of file diff --git a/myenv/bin/Activate.ps1 b/myenv/bin/Activate.ps1 new file mode 100644 index 000000000..b49d77ba4 --- /dev/null +++ b/myenv/bin/Activate.ps1 @@ -0,0 +1,247 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
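+For example, `deactivate -NonDestructive` undoes the PATH, prompt, and
+environment changes but keeps the `deactivate` function defined; Activate.ps1
+itself calls it this way before re-activating.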
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
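+            # For instance, a pyvenv.cfg line such as:  prompt = 'myenv'
+            # splits into key `prompt` and value 'myenv'; the surrounding
+            # quotes are then dropped by the check below.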
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
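+# (For example, a consumer script might test:  if ($env:VIRTUAL_ENV) { Write-Host "venv active: $env:VIRTUAL_ENV" })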
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/myenv/bin/activate b/myenv/bin/activate new file mode 100644 index 000000000..ed27f555a --- /dev/null +++ b/myenv/bin/activate @@ -0,0 +1,69 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1="(myenv) ${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT="(myenv) " + export VIRTUAL_ENV_PROMPT +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null +fi diff --git a/myenv/bin/activate.csh b/myenv/bin/activate.csh new file mode 100644 index 000000000..d469df674 --- /dev/null +++ b/myenv/bin/activate.csh @@ -0,0 +1,26 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = "(myenv) $prompt" + setenv VIRTUAL_ENV_PROMPT "(myenv) " +endif + +alias pydoc python -m pydoc + +rehash diff --git a/myenv/bin/activate.fish b/myenv/bin/activate.fish new file mode 100644 index 000000000..f4f49199a --- /dev/null +++ b/myenv/bin/activate.fish @@ -0,0 +1,69 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/); you cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + set -e _OLD_FISH_PROMPT_OVERRIDE + # prevents error when using nested fish instances (Issue #93858) + if functions -q _old_fish_prompt + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) "(myenv) " (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT "(myenv) " +end diff --git a/myenv/bin/alembic b/myenv/bin/alembic new file mode 100755 index 000000000..710cc83e5 --- /dev/null +++ b/myenv/bin/alembic @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from alembic.config import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/ipdb3 b/myenv/bin/ipdb3 new file mode 100755 index 000000000..be97df129 --- /dev/null +++ b/myenv/bin/ipdb3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from ipdb.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/ipython b/myenv/bin/ipython new file mode 100755 index 000000000..0e35d4735 --- /dev/null +++ b/myenv/bin/ipython @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from IPython import start_ipython +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(start_ipython()) diff --git a/myenv/bin/ipython3 b/myenv/bin/ipython3 new file mode 100755 index 000000000..0e35d4735 --- /dev/null +++ b/myenv/bin/ipython3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from IPython import start_ipython +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(start_ipython()) diff --git a/myenv/bin/mako-render b/myenv/bin/mako-render new file mode 100755 index 000000000..2d5856868 --- /dev/null +++ b/myenv/bin/mako-render @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from mako.cmd import cmdline +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cmdline()) diff --git a/myenv/bin/pip b/myenv/bin/pip new file mode 100755 index 000000000..d3a71d68e --- /dev/null +++ b/myenv/bin/pip @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/pip3 b/myenv/bin/pip3 new file mode 100755 index 000000000..d3a71d68e --- /dev/null +++ b/myenv/bin/pip3 @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/pip3.10 b/myenv/bin/pip3.10 new file mode 100755 index 000000000..d3a71d68e --- /dev/null +++ b/myenv/bin/pip3.10 @@ 
-0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python3" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/pygmentize b/myenv/bin/pygmentize new file mode 100755 index 000000000..55b970f81 --- /dev/null +++ b/myenv/bin/pygmentize @@ -0,0 +1,10 @@ +#!/bin/sh +'''exec' "/home/ann/Desktop/Moringa phase 3/python-p3-freebie-tracker/myenv/bin/python" "$0" "$@" +' ''' +# -*- coding: utf-8 -*- +import re +import sys +from pygments.cmdline import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/myenv/bin/python b/myenv/bin/python new file mode 120000 index 000000000..b8a0adbbb --- /dev/null +++ b/myenv/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/myenv/bin/python3 b/myenv/bin/python3 new file mode 120000 index 000000000..ae65fdaa1 --- /dev/null +++ b/myenv/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/myenv/bin/python3.10 b/myenv/bin/python3.10 new file mode 120000 index 000000000..b8a0adbbb --- /dev/null +++ b/myenv/bin/python3.10 @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/myenv/lib/python3.10/site-packages/IPython/__init__.py b/myenv/lib/python3.10/site-packages/IPython/__init__.py new file mode 100644 index 000000000..c224f9a8c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/__init__.py @@ -0,0 +1,156 @@ +# PYTHON_ARGCOMPLETE_OK +""" +IPython: tools for interactive and parallel computing in Python. + +https://ipython.org +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2008-2011, IPython Development Team. +# Copyright (c) 2001-2007, Fernando Perez +# Copyright (c) 2001, Janko Hauser +# Copyright (c) 2001, Nathaniel Gray +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys + +#----------------------------------------------------------------------------- +# Setup everything +#----------------------------------------------------------------------------- + +# Don't forget to also update setup.py when this changes! +if sys.version_info < (3, 8): + raise ImportError( + """ +IPython 8+ supports Python 3.8 and above, following NEP 29. +When using Python 2.7, please install IPython 5.x LTS Long Term Support version. +Python 3.3 and 3.4 were supported up to IPython 6.x. +Python 3.5 was supported with IPython 7.0 to 7.9. +Python 3.6 was supported with IPython up to 7.16. +Python 3.7 was still supported with the 7.x branch. 
+ +See IPython `README.rst` file for more information: + + https://github.com/ipython/ipython/blob/main/README.rst + +""" + ) + +#----------------------------------------------------------------------------- +# Setup the top level names +#----------------------------------------------------------------------------- + +from .core.getipython import get_ipython +from .core import release +from .core.application import Application +from .terminal.embed import embed + +from .core.interactiveshell import InteractiveShell +from .utils.sysinfo import sys_info +from .utils.frame import extract_module_locals + +# Release data +__author__ = '%s <%s>' % (release.author, release.author_email) +__license__ = release.license +__version__ = release.version +version_info = release.version_info +# list of CVEs that should have been patched in this release. +# this is informational and should not be relied upon. +__patched_cves__ = {"CVE-2022-21699"} + + +def embed_kernel(module=None, local_ns=None, **kwargs): + """Embed and start an IPython kernel in a given scope. + + If you don't want the kernel to initialize the namespace + from the scope of the surrounding function, + and/or you want to load full IPython configuration, + you probably want `IPython.start_kernel()` instead. + + Parameters + ---------- + module : types.ModuleType, optional + The module to load into IPython globals (default: caller) + local_ns : dict, optional + The namespace to load into IPython user namespace (default: caller) + **kwargs : various, optional + Further keyword args are relayed to the IPKernelApp constructor, + allowing configuration of the Kernel. Will only have an effect + on the first embed_kernel call for a given process. + """ + + (caller_module, caller_locals) = extract_module_locals(1) + if module is None: + module = caller_module + if local_ns is None: + local_ns = caller_locals + + # Only import .zmq when we really need it + from ipykernel.embed import embed_kernel as real_embed_kernel + real_embed_kernel(module=module, local_ns=local_ns, **kwargs) + +def start_ipython(argv=None, **kwargs): + """Launch a normal IPython instance (as opposed to embedded) + + `IPython.embed()` puts a shell in a particular calling scope, + such as a function or method for debugging purposes, + which is often not desirable. + + `start_ipython()` does full, regular IPython initialization, + including loading startup files, configuration, etc. + much of which is skipped by `embed()`. + + This is a public API method, and will survive implementation changes. + + Parameters + ---------- + argv : list or None, optional + If unspecified or None, IPython will parse command-line options from sys.argv. + To prevent any command-line parsing, pass an empty list: `argv=[]`. + user_ns : dict, optional + specify this dictionary to initialize the IPython user namespace with particular values. + **kwargs : various, optional + Any other kwargs will be passed to the Application constructor, + such as `config`. + """ + from IPython.terminal.ipapp import launch_new_instance + return launch_new_instance(argv=argv, **kwargs) + +def start_kernel(argv=None, **kwargs): + """Launch a normal IPython kernel instance (as opposed to embedded) + + `IPython.embed_kernel()` puts a shell in a particular calling scope, + such as a function or method for debugging purposes, + which is often not desirable. + + `start_kernel()` does full, regular IPython initialization, + including loading startup files, configuration, etc. + much of which is skipped by `embed()`. 
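+    .. deprecated:: 8.0
+        Use `ipykernel.kernelapp.launch_new_instance` instead, as the
+        DeprecationWarning in the body below advises.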
+ + Parameters + ---------- + argv : list or None, optional + If unspecified or None, IPython will parse command-line options from sys.argv. + To prevent any command-line parsing, pass an empty list: `argv=[]`. + user_ns : dict, optional + specify this dictionary to initialize the IPython user namespace with particular values. + **kwargs : various, optional + Any other kwargs will be passed to the Application constructor, + such as `config`. + """ + import warnings + + warnings.warn( + "start_kernel is deprecated since IPython 8.0, use from `ipykernel.kernelapp.launch_new_instance`", + DeprecationWarning, + stacklevel=2, + ) + from ipykernel.kernelapp import launch_new_instance + return launch_new_instance(argv=argv, **kwargs) diff --git a/myenv/lib/python3.10/site-packages/IPython/__main__.py b/myenv/lib/python3.10/site-packages/IPython/__main__.py new file mode 100644 index 000000000..8e9f989a8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/__main__.py @@ -0,0 +1,15 @@ +# PYTHON_ARGCOMPLETE_OK +# encoding: utf-8 +"""Terminal-based IPython entry point. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +from IPython import start_ipython + +start_ipython() diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..ac68d5f83 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/__main__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 000000000..5d8ae1d14 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/__main__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/conftest.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/conftest.cpython-310.pyc new file mode 100644 index 000000000..3da913f9b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/conftest.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/consoleapp.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/consoleapp.cpython-310.pyc new file mode 100644 index 000000000..9a9b6b42a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/consoleapp.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/display.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/display.cpython-310.pyc new file mode 100644 index 000000000..316aa2adc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/display.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/__pycache__/paths.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/__pycache__/paths.cpython-310.pyc new file mode 100644 index 000000000..1541e36b9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/__pycache__/paths.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/IPython/conftest.py b/myenv/lib/python3.10/site-packages/IPython/conftest.py new file mode 100644 index 000000000..abf613147 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/conftest.py @@ -0,0 +1,87 @@ +import builtins +import inspect +import os +import pathlib +import shutil +import sys +import types + +import pytest + +# Must register before it gets imported +pytest.register_assert_rewrite("IPython.testing.tools") + +from .testing import tools + + +def pytest_collection_modifyitems(items): + """This function is automatically run by pytest passing all collected test + functions. + + We use it to add asyncio marker to all async tests and assert we don't use + test functions that are async generators which wouldn't make sense. + """ + for item in items: + if inspect.iscoroutinefunction(item.obj): + item.add_marker("asyncio") + assert not inspect.isasyncgenfunction(item.obj) + + +def get_ipython(): + from .terminal.interactiveshell import TerminalInteractiveShell + if TerminalInteractiveShell._instance: + return TerminalInteractiveShell.instance() + + config = tools.default_config() + config.TerminalInteractiveShell.simple_prompt = True + + # Create and initialize our test-friendly IPython instance. + shell = TerminalInteractiveShell.instance(config=config) + return shell + + +@pytest.fixture(scope='session', autouse=True) +def work_path(): + path = pathlib.Path("./tmp-ipython-pytest-profiledir") + os.environ["IPYTHONDIR"] = str(path.absolute()) + if path.exists(): + raise ValueError('IPython dir temporary path already exists ! Did previous test run exit successfully ?') + path.mkdir() + yield + shutil.rmtree(str(path.resolve())) + + +def nopage(strng, start=0, screen_lines=0, pager_cmd=None): + if isinstance(strng, dict): + strng = strng.get("text/plain", "") + print(strng) + + +def xsys(self, cmd): + """Replace the default system call with a capturing one for doctest. + """ + # We use getoutput, but we need to strip it because pexpect captures + # the trailing newline differently from commands.getoutput + print(self.getoutput(cmd, split=False, depth=1).rstrip(), end="", file=sys.stdout) + sys.stdout.flush() + + +# for things to work correctly we would need this as a session fixture; +# unfortunately this will fail on some test that get executed as _collection_ +# time (before the fixture run), in particular parametrized test that contain +# yields. so for now execute at import time. +#@pytest.fixture(autouse=True, scope='session') +def inject(): + + builtins.get_ipython = get_ipython + builtins._ip = get_ipython() + builtins.ip = get_ipython() + builtins.ip.system = types.MethodType(xsys, ip) + builtins.ip.builtin_trap.activate() + from .core import page + + page.pager_page = nopage + # yield + + +inject() diff --git a/myenv/lib/python3.10/site-packages/IPython/consoleapp.py b/myenv/lib/python3.10/site-packages/IPython/consoleapp.py new file mode 100644 index 000000000..c2bbe1888 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/consoleapp.py @@ -0,0 +1,12 @@ +""" +Shim to maintain backwards compatibility with old IPython.consoleapp imports. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from warnings import warn + +warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0." 
+ "You should import from jupyter_client.consoleapp instead.", stacklevel=2) + +from jupyter_client.consoleapp import * diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__init__.py b/myenv/lib/python3.10/site-packages/IPython/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..51f1b322d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/alias.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/alias.cpython-310.pyc new file mode 100644 index 000000000..a80681077 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/alias.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/application.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/application.cpython-310.pyc new file mode 100644 index 000000000..7acd9e0fb Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/application.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/async_helpers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/async_helpers.cpython-310.pyc new file mode 100644 index 000000000..b3f6f350d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/async_helpers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/autocall.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/autocall.cpython-310.pyc new file mode 100644 index 000000000..834bf1f4b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/autocall.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/builtin_trap.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/builtin_trap.cpython-310.pyc new file mode 100644 index 000000000..23372cd0c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/builtin_trap.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/compilerop.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/compilerop.cpython-310.pyc new file mode 100644 index 000000000..11fead34b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/compilerop.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completer.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completer.cpython-310.pyc new file mode 100644 index 000000000..1c097974c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completer.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completerlib.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completerlib.cpython-310.pyc new file mode 100644 index 000000000..f53fe69a0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/completerlib.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/crashhandler.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/crashhandler.cpython-310.pyc new file mode 100644 index 000000000..02f81fc70 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/crashhandler.cpython-310.pyc differ
[... 38 further identical "new file / Binary files ... differ" stanzas for compiled IPython/core/__pycache__/*.cpython-310.pyc caches omitted ...]
diff --git
a/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/usage.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/usage.cpython-310.pyc new file mode 100644 index 000000000..baeb972ee Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/core/__pycache__/usage.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/core/alias.py b/myenv/lib/python3.10/site-packages/IPython/core/alias.py new file mode 100644 index 000000000..2ad990231 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/alias.py @@ -0,0 +1,258 @@ +# encoding: utf-8 +""" +System command aliases. + +Authors: + +* Fernando Perez +* Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import re +import sys + +from traitlets.config.configurable import Configurable +from .error import UsageError + +from traitlets import List, Instance +from logging import error + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +# This is used as the pattern for calls to split_user_input. +shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)') + +def default_aliases(): + """Return list of shell aliases to auto-define. + """ + # Note: the aliases defined here should be safe to use on a kernel + # regardless of what frontend it is attached to. Frontends that use a + # kernel in-process can define additional aliases that will only work in + # their case. For example, things like 'less' or 'clear' that manipulate + # the terminal should NOT be declared here, as they will only work if the + # kernel is running inside a true terminal, and not over the network. + + if os.name == 'posix': + default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'), + ('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'), + ('cat', 'cat'), + ] + # Useful set of ls aliases. The GNU and BSD options are a little + # different, so we make aliases that provide as similar as possible + # behavior in ipython, by passing the right flags for each platform + if sys.platform.startswith('linux'): + ls_aliases = [('ls', 'ls -F --color'), + # long ls + ('ll', 'ls -F -o --color'), + # ls normal files only + ('lf', 'ls -F -o --color %l | grep ^-'), + # ls symbolic links + ('lk', 'ls -F -o --color %l | grep ^l'), + # directories or links to directories, + ('ldir', 'ls -F -o --color %l | grep /$'), + # things which are executable + ('lx', 'ls -F -o --color %l | grep ^-..x'), + ] + elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'): + # OpenBSD, NetBSD. The ls implementation on these platforms do not support + # the -G switch and lack the ability to use colorized output. 
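+            # Note: `%l` in these alias strings is expanded to the remainder
+            # of the user's input line when the alias is invoked (see
+            # Alias.__call__ further down).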
+            ls_aliases = [('ls', 'ls -F'),
+                          # long ls
+                          ('ll', 'ls -F -l'),
+                          # ls normal files only
+                          ('lf', 'ls -F -l %l | grep ^-'),
+                          # ls symbolic links
+                          ('lk', 'ls -F -l %l | grep ^l'),
+                          # directories or links to directories,
+                          ('ldir', 'ls -F -l %l | grep /$'),
+                          # things which are executable
+                          ('lx', 'ls -F -l %l | grep ^-..x'),
+                          ]
+        else:
+            # BSD, OSX, etc.
+            ls_aliases = [('ls', 'ls -F -G'),
+                          # long ls
+                          ('ll', 'ls -F -l -G'),
+                          # ls normal files only
+                          ('lf', 'ls -F -l -G %l | grep ^-'),
+                          # ls symbolic links
+                          ('lk', 'ls -F -l -G %l | grep ^l'),
+                          # directories or links to directories,
+                          ('ldir', 'ls -F -G -l %l | grep /$'),
+                          # things which are executable
+                          ('lx', 'ls -F -l -G %l | grep ^-..x'),
+                          ]
+        default_aliases = default_aliases + ls_aliases
+    elif os.name in ['nt', 'dos']:
+        default_aliases = [('ls', 'dir /on'),
+                           ('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
+                           ('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
+                           ('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
+                           ]
+    else:
+        default_aliases = []
+
+    return default_aliases
+
+
+class AliasError(Exception):
+    pass
+
+
+class InvalidAliasError(AliasError):
+    pass
+
+class Alias(object):
+    """Callable object storing the details of one alias.
+
+    Instances are registered as magic functions to allow use of aliases.
+    """
+
+    # Prepare blacklist
+    blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
+
+    def __init__(self, shell, name, cmd):
+        self.shell = shell
+        self.name = name
+        self.cmd = cmd
+        self.__doc__ = "Alias for `!{}`".format(cmd)
+        self.nargs = self.validate()
+
+    def validate(self):
+        """Validate the alias, and return the number of arguments."""
+        if self.name in self.blacklist:
+            raise InvalidAliasError("The name %s can't be aliased "
+                                    "because it is a keyword or builtin." % self.name)
+        try:
+            caller = self.shell.magics_manager.magics['line'][self.name]
+        except KeyError:
+            pass
+        else:
+            if not isinstance(caller, Alias):
+                raise InvalidAliasError("The name %s can't be aliased "
+                                        "because it is another magic command." % self.name)
+
+        if not (isinstance(self.cmd, str)):
+            raise InvalidAliasError("An alias command must be a string, "
+                                    "got: %r" % self.cmd)
+
+        nargs = self.cmd.count('%s') - self.cmd.count('%%s')
+
+        if (nargs > 0) and (self.cmd.find('%l') >= 0):
+            raise InvalidAliasError('The %s and %l specifiers are mutually '
+                                    'exclusive in alias definitions.')
+
+        return nargs
+
+    def __repr__(self):
+        return "<alias {} for {!r}>".format(self.name, self.cmd)
+
+    def __call__(self, rest=''):
+        cmd = self.cmd
+        nargs = self.nargs
+        # Expand the %l special to be the user's input line
+        if cmd.find('%l') >= 0:
+            cmd = cmd.replace('%l', rest)
+            rest = ''
+
+        if nargs==0:
+            if cmd.find('%%s') >= 1:
+                cmd = cmd.replace('%%s', '%s')
+            # Simple, argument-less aliases
+            cmd = '%s %s' % (cmd, rest)
+        else:
+            # Handle aliases with positional arguments
+            args = rest.split(None, nargs)
+            if len(args) < nargs:
+                raise UsageError('Alias <%s> requires %s arguments, %s given.'
% + (self.name, nargs, len(args))) + cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:])) + + self.shell.system(cmd) + +#----------------------------------------------------------------------------- +# Main AliasManager class +#----------------------------------------------------------------------------- + +class AliasManager(Configurable): + + default_aliases = List(default_aliases()).tag(config=True) + user_aliases = List(default_value=[]).tag(config=True) + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True) + + def __init__(self, shell=None, **kwargs): + super(AliasManager, self).__init__(shell=shell, **kwargs) + # For convenient access + self.linemagics = self.shell.magics_manager.magics['line'] + self.init_aliases() + + def init_aliases(self): + # Load default & user aliases + for name, cmd in self.default_aliases + self.user_aliases: + if cmd.startswith('ls ') and self.shell.colors == 'NoColor': + cmd = cmd.replace(' --color', '') + self.soft_define_alias(name, cmd) + + @property + def aliases(self): + return [(n, func.cmd) for (n, func) in self.linemagics.items() + if isinstance(func, Alias)] + + def soft_define_alias(self, name, cmd): + """Define an alias, but don't raise on an AliasError.""" + try: + self.define_alias(name, cmd) + except AliasError as e: + error("Invalid alias: %s" % e) + + def define_alias(self, name, cmd): + """Define a new alias after validating it. + + This will raise an :exc:`AliasError` if there are validation + problems. + """ + caller = Alias(shell=self.shell, name=name, cmd=cmd) + self.shell.magics_manager.register_function(caller, magic_kind='line', + magic_name=name) + + def get_alias(self, name): + """Return an alias, or None if no alias by that name exists.""" + aname = self.linemagics.get(name, None) + return aname if isinstance(aname, Alias) else None + + def is_alias(self, name): + """Return whether or not a given name has been defined as an alias""" + return self.get_alias(name) is not None + + def undefine_alias(self, name): + if self.is_alias(name): + del self.linemagics[name] + else: + raise ValueError('%s is not an alias' % name) + + def clear_aliases(self): + for name, cmd in self.aliases: + self.undefine_alias(name) + + def retrieve_alias(self, name): + """Retrieve the command to which an alias expands.""" + caller = self.get_alias(name) + if caller: + return caller.cmd + else: + raise ValueError('%s is not an alias' % name) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/application.py b/myenv/lib/python3.10/site-packages/IPython/core/application.py new file mode 100644 index 000000000..26c061661 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/application.py @@ -0,0 +1,489 @@ +# encoding: utf-8 +""" +An application for IPython. + +All top-level applications should use the classes in this module for +handling configuration and creating configurables. + +The job of an :class:`Application` is to create the master configuration +object and then create the configurable objects, passing the config to them. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
+ +import atexit +from copy import deepcopy +import logging +import os +import shutil +import sys + +from pathlib import Path + +from traitlets.config.application import Application, catch_config_error +from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader +from IPython.core import release, crashhandler +from IPython.core.profiledir import ProfileDir, ProfileDirError +from IPython.paths import get_ipython_dir, get_ipython_package_dir +from IPython.utils.path import ensure_dir_exists +from traitlets import ( + List, Unicode, Type, Bool, Set, Instance, Undefined, + default, observe, +) + +if os.name == "nt": + programdata = os.environ.get("PROGRAMDATA", None) + if programdata is not None: + SYSTEM_CONFIG_DIRS = [str(Path(programdata) / "ipython")] + else: # PROGRAMDATA is not defined by default on XP. + SYSTEM_CONFIG_DIRS = [] +else: + SYSTEM_CONFIG_DIRS = [ + "/usr/local/etc/ipython", + "/etc/ipython", + ] + + +ENV_CONFIG_DIRS = [] +_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython') +if _env_config_dir not in SYSTEM_CONFIG_DIRS: + # only add ENV_CONFIG if sys.prefix is not already included + ENV_CONFIG_DIRS.append(_env_config_dir) + + +_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS') +if _envvar in {None, ''}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = None +else: + if _envvar.lower() in {'1','true'}: + IPYTHON_SUPPRESS_CONFIG_ERRORS = True + elif _envvar.lower() in {'0','false'} : + IPYTHON_SUPPRESS_CONFIG_ERRORS = False + else: + sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar ) + +# aliases and flags + +base_aliases = {} +if isinstance(Application.aliases, dict): + # traitlets 5 + base_aliases.update(Application.aliases) +base_aliases.update( + { + "profile-dir": "ProfileDir.location", + "profile": "BaseIPythonApplication.profile", + "ipython-dir": "BaseIPythonApplication.ipython_dir", + "log-level": "Application.log_level", + "config": "BaseIPythonApplication.extra_config_file", + } +) + +base_flags = dict() +if isinstance(Application.flags, dict): + # traitlets 5 + base_flags.update(Application.flags) +base_flags.update( + dict( + debug=( + {"Application": {"log_level": logging.DEBUG}}, + "set log level to logging.DEBUG (maximize logging output)", + ), + quiet=( + {"Application": {"log_level": logging.CRITICAL}}, + "set log level to logging.CRITICAL (minimize logging output)", + ), + init=( + { + "BaseIPythonApplication": { + "copy_config_files": True, + "auto_create": True, + } + }, + """Initialize profile with default config files. This is equivalent + to running `ipython profile create ` prior to startup. 
+ """, + ), + ) +) + + +class ProfileAwareConfigLoader(PyFileConfigLoader): + """A Python file config loader that is aware of IPython profiles.""" + def load_subconfig(self, fname, path=None, profile=None): + if profile is not None: + try: + profile_dir = ProfileDir.find_profile_dir_by_name( + get_ipython_dir(), + profile, + ) + except ProfileDirError: + return + path = profile_dir.location + return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path) + +class BaseIPythonApplication(Application): + + name = u'ipython' + description = Unicode(u'IPython: an enhanced interactive Python shell.') + version = Unicode(release.version) + + aliases = base_aliases + flags = base_flags + classes = List([ProfileDir]) + + # enable `load_subconfig('cfg.py', profile='name')` + python_config_loader_class = ProfileAwareConfigLoader + + # Track whether the config_file has changed, + # because some logic happens only if we aren't using the default. + config_file_specified = Set() + + config_file_name = Unicode() + @default('config_file_name') + def _config_file_name_default(self): + return self.name.replace('-','_') + u'_config.py' + @observe('config_file_name') + def _config_file_name_changed(self, change): + if change['new'] != change['old']: + self.config_file_specified.add(change['new']) + + # The directory that contains IPython's builtin profiles. + builtin_profile_dir = Unicode( + os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default') + ) + + config_file_paths = List(Unicode()) + @default('config_file_paths') + def _config_file_paths_default(self): + return [] + + extra_config_file = Unicode( + help="""Path to an extra config file to load. + + If specified, load this config file in addition to any other IPython config. + """).tag(config=True) + @observe('extra_config_file') + def _extra_config_file_changed(self, change): + old = change['old'] + new = change['new'] + try: + self.config_files.remove(old) + except ValueError: + pass + self.config_file_specified.add(new) + self.config_files.append(new) + + profile = Unicode(u'default', + help="""The IPython profile to use.""" + ).tag(config=True) + + @observe('profile') + def _profile_changed(self, change): + self.builtin_profile_dir = os.path.join( + get_ipython_package_dir(), u'config', u'profile', change['new'] + ) + + add_ipython_dir_to_sys_path = Bool( + False, + """Should the IPython profile directory be added to sys path ? + + This option was non-existing before IPython 8.0, and ipython_dir was added to + sys path to allow import of extensions present there. This was historical + baggage from when pip did not exist. This now default to false, + but can be set to true for legacy reasons. + """, + ).tag(config=True) + + ipython_dir = Unicode( + help=""" + The name of the IPython directory. This directory is used for logging + configuration (through profiles), history storage, etc. The default + is usually $HOME/.ipython. This option can also be specified through + the environment variable IPYTHONDIR. 
+ """ + ).tag(config=True) + @default('ipython_dir') + def _ipython_dir_default(self): + d = get_ipython_dir() + self._ipython_dir_changed({ + 'name': 'ipython_dir', + 'old': d, + 'new': d, + }) + return d + + _in_init_profile_dir = False + profile_dir = Instance(ProfileDir, allow_none=True) + @default('profile_dir') + def _profile_dir_default(self): + # avoid recursion + if self._in_init_profile_dir: + return + # profile_dir requested early, force initialization + self.init_profile_dir() + return self.profile_dir + + overwrite = Bool(False, + help="""Whether to overwrite existing config files when copying""" + ).tag(config=True) + auto_create = Bool(False, + help="""Whether to create profile dir if it doesn't exist""" + ).tag(config=True) + + config_files = List(Unicode()) + @default('config_files') + def _config_files_default(self): + return [self.config_file_name] + + copy_config_files = Bool(False, + help="""Whether to install the default config files into the profile dir. + If a new profile is being created, and IPython contains config files for that + profile, then they will be staged into the new directory. Otherwise, + default config files will be automatically generated. + """).tag(config=True) + + verbose_crash = Bool(False, + help="""Create a massive crash report when IPython encounters what may be an + internal error. The default is to append a short message to the + usual traceback""").tag(config=True) + + # The class to use as the crash handler. + crash_handler_class = Type(crashhandler.CrashHandler) + + @catch_config_error + def __init__(self, **kwargs): + super(BaseIPythonApplication, self).__init__(**kwargs) + # ensure current working directory exists + try: + os.getcwd() + except: + # exit if cwd doesn't exist + self.log.error("Current working directory doesn't exist.") + self.exit(1) + + #------------------------------------------------------------------------- + # Various stages of Application creation + #------------------------------------------------------------------------- + + def init_crash_handler(self): + """Create a crash handler, typically setting sys.excepthook to it.""" + self.crash_handler = self.crash_handler_class(self) + sys.excepthook = self.excepthook + def unset_crashhandler(): + sys.excepthook = sys.__excepthook__ + atexit.register(unset_crashhandler) + + def excepthook(self, etype, evalue, tb): + """this is sys.excepthook after init_crashhandler + + set self.verbose_crash=True to use our full crashhandler, instead of + a regular traceback with a short message (crash_handler_lite) + """ + + if self.verbose_crash: + return self.crash_handler(etype, evalue, tb) + else: + return crashhandler.crash_handler_lite(etype, evalue, tb) + + @observe('ipython_dir') + def _ipython_dir_changed(self, change): + old = change['old'] + new = change['new'] + if old is not Undefined: + str_old = os.path.abspath(old) + if str_old in sys.path: + sys.path.remove(str_old) + if self.add_ipython_dir_to_sys_path: + str_path = os.path.abspath(new) + sys.path.append(str_path) + ensure_dir_exists(new) + readme = os.path.join(new, "README") + readme_src = os.path.join( + get_ipython_package_dir(), "config", "profile", "README" + ) + if not os.path.exists(readme) and os.path.exists(readme_src): + shutil.copy(readme_src, readme) + for d in ("extensions", "nbextensions"): + path = os.path.join(new, d) + try: + ensure_dir_exists(path) + except OSError as e: + # this will not be EEXIST + self.log.error("couldn't create path %s: %s", path, e) + self.log.debug("IPYTHONDIR set to: %s" % 
new) + + def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): + """Load the config file. + + By default, errors in loading config are handled, and a warning + printed on screen. For testing, the suppress_errors option is set + to False, so errors will make tests fail. + + `suppress_errors` default value is to be `None` in which case the + behavior default to the one of `traitlets.Application`. + + The default value can be set : + - to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive). + - to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive). + - to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset. + + Any other value are invalid, and will make IPython exit with a non-zero return code. + """ + + + self.log.debug("Searching path %s for config files", self.config_file_paths) + base_config = 'ipython_config.py' + self.log.debug("Attempting to load config file: %s" % + base_config) + try: + if suppress_errors is not None: + old_value = Application.raise_config_file_errors + Application.raise_config_file_errors = not suppress_errors; + Application.load_config_file( + self, + base_config, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # ignore errors loading parent + self.log.debug("Config file %s not found", base_config) + pass + if suppress_errors is not None: + Application.raise_config_file_errors = old_value + + for config_file_name in self.config_files: + if not config_file_name or config_file_name == base_config: + continue + self.log.debug("Attempting to load config file: %s" % + self.config_file_name) + try: + Application.load_config_file( + self, + config_file_name, + path=self.config_file_paths + ) + except ConfigFileNotFound: + # Only warn if the default config file was NOT being used. + if config_file_name in self.config_file_specified: + msg = self.log.warning + else: + msg = self.log.debug + msg("Config file not found, skipping: %s", config_file_name) + except Exception: + # For testing purposes. 
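# --- Editor's aside (illustrative, not part of the vendored files) ----------
# The docstring above describes IPYTHON_SUPPRESS_CONFIG_ERRORS as a tri-state
# switch: unset or empty means None (defer to the traitlets default),
# '1'/'true' means True, '0'/'false' means False, and anything else aborts.
# The same rule as a small helper; the variable name is reused here purely
# for illustration:

import os

def tristate_env(name: str):
    raw = os.environ.get(name)
    if raw in (None, ''):
        return None                    # defer to the library default
    if raw.lower() in ('1', 'true'):
        return True
    if raw.lower() in ('0', 'false'):
        return False
    raise SystemExit(f"unsupported value for {name}: {raw!r}")

# e.g. tristate_env('IPYTHON_SUPPRESS_CONFIG_ERRORS') -> None when unset
# --- end aside ---------------------------------------------------------------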
+ if not suppress_errors: + raise + self.log.warning("Error loading config file: %s" % + self.config_file_name, exc_info=True) + + def init_profile_dir(self): + """initialize the profile dir""" + self._in_init_profile_dir = True + if self.profile_dir is not None: + # already ran + return + if 'ProfileDir.location' not in self.config: + # location not specified, find by profile name + try: + p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + # not found, maybe create it (always create default profile) + if self.auto_create or self.profile == 'default': + try: + p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile: %r"%self.profile) + self.exit(1) + else: + self.log.info("Created profile dir: %r"%p.location) + else: + self.log.fatal("Profile %r not found."%self.profile) + self.exit(1) + else: + self.log.debug(f"Using existing profile dir: {p.location!r}") + else: + location = self.config.ProfileDir.location + # location is fully specified + try: + p = ProfileDir.find_profile_dir(location, self.config) + except ProfileDirError: + # not found, maybe create it + if self.auto_create: + try: + p = ProfileDir.create_profile_dir(location, self.config) + except ProfileDirError: + self.log.fatal("Could not create profile directory: %r"%location) + self.exit(1) + else: + self.log.debug("Creating new profile dir: %r"%location) + else: + self.log.fatal("Profile directory %r not found."%location) + self.exit(1) + else: + self.log.debug(f"Using existing profile dir: {p.location!r}") + # if profile_dir is specified explicitly, set profile name + dir_name = os.path.basename(p.location) + if dir_name.startswith('profile_'): + self.profile = dir_name[8:] + + self.profile_dir = p + self.config_file_paths.append(p.location) + self._in_init_profile_dir = False + + def init_config_files(self): + """[optionally] copy default config files into profile dir.""" + self.config_file_paths.extend(ENV_CONFIG_DIRS) + self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) + # copy config files + path = Path(self.builtin_profile_dir) + if self.copy_config_files: + src = self.profile + + cfg = self.config_file_name + if path and (path / cfg).exists(): + self.log.warning( + "Staging %r from %s into %r [overwrite=%s]" + % (cfg, src, self.profile_dir.location, self.overwrite) + ) + self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) + else: + self.stage_default_config_file() + else: + # Still stage *bundled* config files, but not generated ones + # This is necessary for `ipython profile=sympy` to load the profile + # on the first go + files = path.glob("*.py") + for fullpath in files: + cfg = fullpath.name + if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): + # file was copied + self.log.warning("Staging bundled %s from %s into %r"%( + cfg, self.profile, self.profile_dir.location) + ) + + + def stage_default_config_file(self): + """auto generate default config file, and stage it into the profile.""" + s = self.generate_config_file() + config_file = Path(self.profile_dir.location) / self.config_file_name + if self.overwrite or not config_file.exists(): + self.log.warning("Generating default config file: %r" % (config_file)) + config_file.write_text(s, encoding="utf-8") + + @catch_config_error + def initialize(self, argv=None): + # don't hook up crash handler before parsing command-line + self.parse_command_line(argv) + 
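# --- Editor's aside (illustrative, not part of the vendored files) ----------
# The remainder of initialize() below gives command-line options priority
# over config files by snapshotting the CLI config, loading the files, then
# re-applying the snapshot. The same merge order in miniature, with plain
# dicts standing in for traitlets Config objects:

def effective_config(cli_opts: dict, file_opts: dict) -> dict:
    merged = dict(file_opts)   # file values first ...
    merged.update(cli_opts)    # ... then CLI values win on conflict
    return merged

assert effective_config({'log_level': 10}, {'log_level': 30, 'x': 1}) == \
    {'log_level': 10, 'x': 1}
# --- end aside ---------------------------------------------------------------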
self.init_crash_handler() + if self.subapp is not None: + # stop here if subapp is taking over + return + # save a copy of CLI config to re-load after config files + # so that it has highest priority + cl_config = deepcopy(self.config) + self.init_profile_dir() + self.init_config_files() + self.load_config_file() + # enforce cl-opts override configfile opts: + self.update_config(cl_config) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/async_helpers.py b/myenv/lib/python3.10/site-packages/IPython/core/async_helpers.py new file mode 100644 index 000000000..0e7db0bb5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/async_helpers.py @@ -0,0 +1,156 @@ +""" +Async helper function that are invalid syntax on Python 3.5 and below. + +This code is best effort, and may have edge cases not behaving as expected. In +particular it contain a number of heuristics to detect whether code is +effectively async and need to run in an event loop or not. + +Some constructs (like top-level `return`, or `yield`) are taken care of +explicitly to actually raise a SyntaxError and stay as close as possible to +Python semantics. +""" + + +import ast +import asyncio +import inspect +from functools import wraps + +_asyncio_event_loop = None + + +def get_asyncio_loop(): + """asyncio has deprecated get_event_loop + + Replicate it here, with our desired semantics: + + - always returns a valid, not-closed loop + - not thread-local like asyncio's, + because we only want one loop for IPython + - if called from inside a coroutine (e.g. in ipykernel), + return the running loop + + .. versionadded:: 8.0 + """ + try: + return asyncio.get_running_loop() + except RuntimeError: + # not inside a coroutine, + # track our own global + pass + + # not thread-local like asyncio's, + # because we only track one event loop to run for IPython itself, + # always in the main thread. + global _asyncio_event_loop + if _asyncio_event_loop is None or _asyncio_event_loop.is_closed(): + _asyncio_event_loop = asyncio.new_event_loop() + return _asyncio_event_loop + + +class _AsyncIORunner: + def __call__(self, coro): + """ + Handler for asyncio autoawait + """ + return get_asyncio_loop().run_until_complete(coro) + + def __str__(self): + return "asyncio" + + +_asyncio_runner = _AsyncIORunner() + + +class _AsyncIOProxy: + """Proxy-object for an asyncio + + Any coroutine methods will be wrapped in event_loop.run_ + """ + + def __init__(self, obj, event_loop): + self._obj = obj + self._event_loop = event_loop + + def __repr__(self): + return f"<_AsyncIOProxy({self._obj!r})>" + + def __getattr__(self, key): + attr = getattr(self._obj, key) + if inspect.iscoroutinefunction(attr): + # if it's a coroutine method, + # return a threadsafe wrapper onto the _current_ asyncio loop + @wraps(attr) + def _wrapped(*args, **kwargs): + concurrent_future = asyncio.run_coroutine_threadsafe( + attr(*args, **kwargs), self._event_loop + ) + return asyncio.wrap_future(concurrent_future) + + return _wrapped + else: + return attr + + def __dir__(self): + return dir(self._obj) + + +def _curio_runner(coroutine): + """ + handler for curio autoawait + """ + import curio + + return curio.run(coroutine) + + +def _trio_runner(async_fn): + import trio + + async def loc(coro): + """ + We need the dummy no-op async def to protect from + trio's internal. 
See https://github.com/python-trio/trio/issues/89 + """ + return await coro + + return trio.run(loc, async_fn) + + +def _pseudo_sync_runner(coro): + """ + A runner that does not really allow async execution, and just advance the coroutine. + + See discussion in https://github.com/python-trio/trio/issues/608, + + Credit to Nathaniel Smith + """ + try: + coro.send(None) + except StopIteration as exc: + return exc.value + else: + # TODO: do not raise but return an execution result with the right info. + raise RuntimeError( + "{coro_name!r} needs a real async loop".format(coro_name=coro.__name__) + ) + + +def _should_be_async(cell: str) -> bool: + """Detect if a block of code need to be wrapped in an `async def` + + Attempt to parse the block of code, it it compile we're fine. + Otherwise we wrap if and try to compile. + + If it works, assume it should be async. Otherwise Return False. + + Not handled yet: If the block of code has a return statement as the top + level, it will be seen as async. This is a know limitation. + """ + try: + code = compile( + cell, "<>", "exec", flags=getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0) + ) + return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + except (SyntaxError, MemoryError): + return False diff --git a/myenv/lib/python3.10/site-packages/IPython/core/autocall.py b/myenv/lib/python3.10/site-packages/IPython/core/autocall.py new file mode 100644 index 000000000..54beec3f5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/autocall.py @@ -0,0 +1,70 @@ +# encoding: utf-8 +""" +Autocall capabilities for IPython.core. + +Authors: + +* Brian Granger +* Fernando Perez +* Thomas Kluyver + +Notes +----- +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +class IPyAutocall(object): + """ Instances of this class are always autocalled + + This happens regardless of 'autocall' variable state. Use this to + develop macro-like mechanisms. + """ + _ip = None + rewrite = True + def __init__(self, ip=None): + self._ip = ip + + def set_ip(self, ip): + """Will be used to set _ip point to current ipython instance b/f call + + Override this method if you don't want this to happen. + + """ + self._ip = ip + + +class ExitAutocall(IPyAutocall): + """An autocallable object which will be added to the user namespace so that + exit, exit(), quit or quit() are all valid ways to close the shell.""" + rewrite = False + + def __call__(self): + self._ip.ask_exit() + +class ZMQExitAutocall(ExitAutocall): + """Exit IPython. Autocallable, so it needn't be explicitly called. + + Parameters + ---------- + keep_kernel : bool + If True, leave the kernel alive. Otherwise, tell the kernel to exit too + (default). 
+ """ + def __call__(self, keep_kernel=False): + self._ip.keepkernel_on_exit = keep_kernel + self._ip.ask_exit() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/builtin_trap.py b/myenv/lib/python3.10/site-packages/IPython/core/builtin_trap.py new file mode 100644 index 000000000..a8ea4abcd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/builtin_trap.py @@ -0,0 +1,86 @@ +""" +A context manager for managing things injected into :mod:`builtins`. +""" +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. +import builtins as builtin_mod + +from traitlets.config.configurable import Configurable + +from traitlets import Instance + + +class __BuiltinUndefined(object): pass +BuiltinUndefined = __BuiltinUndefined() + +class __HideBuiltin(object): pass +HideBuiltin = __HideBuiltin() + + +class BuiltinTrap(Configurable): + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) + + def __init__(self, shell=None): + super(BuiltinTrap, self).__init__(shell=shell, config=None) + self._orig_builtins = {} + # We define this to track if a single BuiltinTrap is nested. + # Only turn off the trap when the outermost call to __exit__ is made. + self._nested_level = 0 + self.shell = shell + # builtins we always add - if set to HideBuiltin, they will just + # be removed instead of being replaced by something else + self.auto_builtins = {'exit': HideBuiltin, + 'quit': HideBuiltin, + 'get_ipython': self.shell.get_ipython, + } + + def __enter__(self): + if self._nested_level == 0: + self.activate() + self._nested_level += 1 + # I return self, so callers can use add_builtin in a with clause. + return self + + def __exit__(self, type, value, traceback): + if self._nested_level == 1: + self.deactivate() + self._nested_level -= 1 + # Returning False will cause exceptions to propagate + return False + + def add_builtin(self, key, value): + """Add a builtin and save the original.""" + bdict = builtin_mod.__dict__ + orig = bdict.get(key, BuiltinUndefined) + if value is HideBuiltin: + if orig is not BuiltinUndefined: #same as 'key in bdict' + self._orig_builtins[key] = orig + del bdict[key] + else: + self._orig_builtins[key] = orig + bdict[key] = value + + def remove_builtin(self, key, orig): + """Remove an added builtin and re-set the original.""" + if orig is BuiltinUndefined: + del builtin_mod.__dict__[key] + else: + builtin_mod.__dict__[key] = orig + + def activate(self): + """Store ipython references in the __builtin__ namespace.""" + + add_builtin = self.add_builtin + for name, func in self.auto_builtins.items(): + add_builtin(name, func) + + def deactivate(self): + """Remove any builtins which might have been added by add_builtins, or + restore overwritten ones to their previous values.""" + remove_builtin = self.remove_builtin + for key, val in self._orig_builtins.items(): + remove_builtin(key, val) + self._orig_builtins.clear() + self._builtins_added = False diff --git a/myenv/lib/python3.10/site-packages/IPython/core/compilerop.py b/myenv/lib/python3.10/site-packages/IPython/core/compilerop.py new file mode 100644 index 000000000..7799a4fc9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/compilerop.py @@ -0,0 +1,214 @@ +"""Compiler tools with improved interactive support. + +Provides compilation machinery similar to codeop, but with caching support so +we can provide interactive tracebacks. 
+
+Authors
+-------
+* Robert Kern
+* Fernando Perez
+* Thomas Kluyver
+"""
+
+# Note: though it might be more natural to name this module 'compiler', that
+# name is in the stdlib and name collisions with the stdlib tend to produce
+# weird problems (often with third-party tools).
+
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team.
+#
+# Distributed under the terms of the BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+
+# Stdlib imports
+import __future__
+from ast import PyCF_ONLY_AST
+import codeop
+import functools
+import hashlib
+import linecache
+import operator
+import time
+from contextlib import contextmanager
+
+#-----------------------------------------------------------------------------
+# Constants
+#-----------------------------------------------------------------------------
+
+# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
+# this is used as a bitmask to extract future-related code flags.
+PyCF_MASK = functools.reduce(operator.or_,
+                             (getattr(__future__, fname).compiler_flag
+                              for fname in __future__.all_feature_names))
+
+#-----------------------------------------------------------------------------
+# Local utilities
+#-----------------------------------------------------------------------------
+
+def code_name(code, number=0):
+    """ Compute a (probably) unique name for code for caching.
+
+    This now expects code to be unicode.
+    """
+    hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
+    # Include the number and 12 characters of the hash in the name.  It's
+    # pretty much impossible that in a single session we'll have collisions
+    # even with truncated hashes, and the full one makes tracebacks too long
+    return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+class CachingCompiler(codeop.Compile):
+    """A compiler that caches code compiled from interactive statements.
+    """
+
+    def __init__(self):
+        codeop.Compile.__init__(self)
+
+        # Caching a dictionary { filename: execution_count } for nicely
+        # rendered tracebacks. The filename corresponds to the filename
+        # argument used for the builtins.compile function.
+        self._filename_map = {}
+
+    def ast_parse(self, source, filename='<unknown>', symbol='exec'):
+        """Parse code to an AST with the current compiler flags active.
+
+        Arguments are exactly the same as ast.parse (in the standard library),
+        and are passed to the built-in compile function."""
+        return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
+
+    def reset_compiler_flags(self):
+        """Reset compiler flags to default state."""
+        # This value is copied from codeop.Compile.__init__, so if that ever
+        # changes, it will need to be updated.
+        self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
+
+    @property
+    def compiler_flags(self):
+        """Flags currently active in the compilation process.
+        """
+        return self.flags
+
+    def get_code_name(self, raw_code, transformed_code, number):
+        """Compute filename given the code, and the cell number.
+ + Parameters + ---------- + raw_code : str + The raw cell code. + transformed_code : str + The executable Python source code to cache and compile. + number : int + A number which forms part of the code's name. Used for the execution + counter. + + Returns + ------- + The computed filename. + """ + return code_name(transformed_code, number) + + def format_code_name(self, name): + """Return a user-friendly label and name for a code block. + + Parameters + ---------- + name : str + The name for the code block returned from get_code_name + + Returns + ------- + A (label, name) pair that can be used in tracebacks, or None if the default formatting should be used. + """ + if name in self._filename_map: + return "Cell", "In[%s]" % self._filename_map[name] + + def cache(self, transformed_code, number=0, raw_code=None): + """Make a name for a block of code, and cache the code. + + Parameters + ---------- + transformed_code : str + The executable Python source code to cache and compile. + number : int + A number which forms part of the code's name. Used for the execution + counter. + raw_code : str + The raw code before transformation, if None, set to `transformed_code`. + + Returns + ------- + The name of the cached code (as a string). Pass this as the filename + argument to compilation, so that tracebacks are correctly hooked up. + """ + if raw_code is None: + raw_code = transformed_code + + name = self.get_code_name(raw_code, transformed_code, number) + + # Save the execution count + self._filename_map[name] = number + + # Since Python 2.5, setting mtime to `None` means the lines will + # never be removed by `linecache.checkcache`. This means all the + # monkeypatching has *never* been necessary, since this code was + # only added in 2010, at which point IPython had already stopped + # supporting Python 2.4. + # + # Note that `linecache.clearcache` and `linecache.updatecache` may + # still remove our code from the cache, but those show explicit + # intent, and we should not try to interfere. Normally the former + # is never called except when out of memory, and the latter is only + # called for lines *not* in the cache. + entry = ( + len(transformed_code), + None, + [line + "\n" for line in transformed_code.splitlines()], + name, + ) + linecache.cache[name] = entry + return name + + @contextmanager + def extra_flags(self, flags): + ## bits that we'll set to 1 + turn_on_bits = ~self.flags & flags + + + self.flags = self.flags | flags + try: + yield + finally: + # turn off only the bits we turned on so that something like + # __future__ that set flags stays. + self.flags &= ~turn_on_bits + + +def check_linecache_ipython(*args): + """Deprecated since IPython 8.6. Call linecache.checkcache() directly. + + It was already not necessary to call this function directly. If no + CachingCompiler had been created, this function would fail badly. If + an instance had been created, this function would've been monkeypatched + into place. + + As of IPython 8.6, the monkeypatching has gone away entirely. But there + were still internal callers of this function, so maybe external callers + also existed? 
+ """ + import warnings + + warnings.warn( + "Deprecated Since IPython 8.6, Just call linecache.checkcache() directly.", + DeprecationWarning, + stacklevel=2, + ) + linecache.checkcache() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/completer.py b/myenv/lib/python3.10/site-packages/IPython/core/completer.py new file mode 100644 index 000000000..f0bbb4e56 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/completer.py @@ -0,0 +1,3322 @@ +"""Completion for IPython. + +This module started as fork of the rlcompleter module in the Python standard +library. The original enhancements made to rlcompleter have been sent +upstream and were accepted as of Python 2.3, + +This module now support a wide variety of completion mechanism both available +for normal classic Python code, as well as completer for IPython specific +Syntax like magics. + +Latex and Unicode completion +============================ + +IPython and compatible frontends not only can complete your code, but can help +you to input a wide range of characters. In particular we allow you to insert +a unicode character using the tab completion mechanism. + +Forward latex/unicode completion +-------------------------------- + +Forward completion allows you to easily type a unicode character using its latex +name, or unicode long description. To do so type a backslash follow by the +relevant name and press tab: + + +Using latex completion: + +.. code:: + + \\alpha + α + +or using unicode completion: + + +.. code:: + + \\GREEK SMALL LETTER ALPHA + α + + +Only valid Python identifiers will complete. Combining characters (like arrow or +dots) are also available, unlike latex they need to be put after the their +counterpart that is to say, ``F\\\\vec`` is correct, not ``\\\\vecF``. + +Some browsers are known to display combining characters incorrectly. + +Backward latex completion +------------------------- + +It is sometime challenging to know how to type a character, if you are using +IPython, or any compatible frontend you can prepend backslash to the character +and press :kbd:`Tab` to expand it to its latex form. + +.. code:: + + \\α + \\alpha + + +Both forward and backward completions can be deactivated by setting the +:std:configtrait:`Completer.backslash_combining_completions` option to +``False``. + + +Experimental +============ + +Starting with IPython 6.0, this module can make use of the Jedi library to +generate completions both using static analysis of the code, and dynamically +inspecting multiple namespaces. Jedi is an autocompletion and static analysis +for Python. The APIs attached to this new mechanism is unstable and will +raise unless use in an :any:`provisionalcompleter` context manager. + +You will find that the following are experimental: + + - :any:`provisionalcompleter` + - :any:`IPCompleter.completions` + - :any:`Completion` + - :any:`rectify_completions` + +.. note:: + + better name for :any:`rectify_completions` ? + +We welcome any feedback on these new API, and we also encourage you to try this +module in debug mode (start IPython with ``--Completer.debug=True``) in order +to have extra logging information if :any:`jedi` is crashing, or if current +IPython completer pending deprecations are returning results not yet handled +by :any:`jedi` + +Using Jedi for tab completion allow snippets like the following to work without +having to execute any code: + + >>> myvar = ['hello', 42] + ... 
myvar[1].bi + +Tab completion will be able to infer that ``myvar[1]`` is a real number without +executing almost any code unlike the deprecated :any:`IPCompleter.greedy` +option. + +Be sure to update :any:`jedi` to the latest stable version or to try the +current development version to get better completions. + +Matchers +======== + +All completions routines are implemented using unified *Matchers* API. +The matchers API is provisional and subject to change without notice. + +The built-in matchers include: + +- :any:`IPCompleter.dict_key_matcher`: dictionary key completions, +- :any:`IPCompleter.magic_matcher`: completions for magics, +- :any:`IPCompleter.unicode_name_matcher`, + :any:`IPCompleter.fwd_unicode_matcher` + and :any:`IPCompleter.latex_name_matcher`: see `Forward latex/unicode completion`_, +- :any:`back_unicode_name_matcher` and :any:`back_latex_name_matcher`: see `Backward latex completion`_, +- :any:`IPCompleter.file_matcher`: paths to files and directories, +- :any:`IPCompleter.python_func_kw_matcher` - function keywords, +- :any:`IPCompleter.python_matches` - globals and attributes (v1 API), +- ``IPCompleter.jedi_matcher`` - static analysis with Jedi, +- :any:`IPCompleter.custom_completer_matcher` - pluggable completer with a default + implementation in :any:`InteractiveShell` which uses IPython hooks system + (`complete_command`) with string dispatch (including regular expressions). + Differently to other matchers, ``custom_completer_matcher`` will not suppress + Jedi results to match behaviour in earlier IPython versions. + +Custom matchers can be added by appending to ``IPCompleter.custom_matchers`` list. + +Matcher API +----------- + +Simplifying some details, the ``Matcher`` interface can described as + +.. code-block:: + + MatcherAPIv1 = Callable[[str], list[str]] + MatcherAPIv2 = Callable[[CompletionContext], SimpleMatcherResult] + + Matcher = MatcherAPIv1 | MatcherAPIv2 + +The ``MatcherAPIv1`` reflects the matcher API as available prior to IPython 8.6.0 +and remains supported as a simplest way for generating completions. This is also +currently the only API supported by the IPython hooks system `complete_command`. + +To distinguish between matcher versions ``matcher_api_version`` attribute is used. +More precisely, the API allows to omit ``matcher_api_version`` for v1 Matchers, +and requires a literal ``2`` for v2 Matchers. + +Once the API stabilises future versions may relax the requirement for specifying +``matcher_api_version`` by switching to :any:`functools.singledispatch`, therefore +please do not rely on the presence of ``matcher_api_version`` for any purposes. + +Suppression of competing matchers +--------------------------------- + +By default results from all matchers are combined, in the order determined by +their priority. Matchers can request to suppress results from subsequent +matchers by setting ``suppress`` to ``True`` in the ``MatcherResult``. + +When multiple matchers simultaneously request surpression, the results from of +the matcher with higher priority will be returned. + +Sometimes it is desirable to suppress most but not all other matchers; +this can be achieved by adding a list of identifiers of matchers which +should not be suppressed to ``MatcherResult`` under ``do_not_suppress`` key. + +The suppression behaviour can is user-configurable via +:std:configtrait:`IPCompleter.suppress_competing_matchers`. +""" + + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
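# --- Editor's aside (illustrative sketch, not part of the vendored files) ---
# The module docstring above describes two matcher generations: API v1 is a
# plain callable  str -> list[str],  while API v2 receives a CompletionContext
# and returns a MatcherResult dictionary. A toy v2 matcher written against the
# public names this file defines further down (the decorator and result types
# are real; the matcher itself is hypothetical):

import os
from IPython.core.completer import SimpleCompletion, context_matcher

@context_matcher()
def env_var_matcher(context):
    """Complete $NAME tokens from the process environment (toy example)."""
    token = context.token
    if not token.startswith("$"):
        return {"completions": []}
    prefix = token[1:]
    return {
        "completions": [
            SimpleCompletion("$" + name, type="env var")
            for name in os.environ if name.startswith(prefix)
        ],
        "suppress": False,  # let other matchers contribute as well
    }

# It would be registered by appending it to ``ip.Completer.custom_matchers``,
# per the docstring above.
# --- end aside ---------------------------------------------------------------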
+# +# Some of this code originated from rlcompleter in the Python standard library +# Copyright (C) 2001 Python Software Foundation, www.python.org + +from __future__ import annotations +import builtins as builtin_mod +import enum +import glob +import inspect +import itertools +import keyword +import os +import re +import string +import sys +import tokenize +import time +import unicodedata +import uuid +import warnings +from ast import literal_eval +from collections import defaultdict +from contextlib import contextmanager +from dataclasses import dataclass +from functools import cached_property, partial +from types import SimpleNamespace +from typing import ( + Iterable, + Iterator, + List, + Tuple, + Union, + Any, + Sequence, + Dict, + Optional, + TYPE_CHECKING, + Set, + Sized, + TypeVar, + Literal, +) + +from IPython.core.guarded_eval import guarded_eval, EvaluationContext +from IPython.core.error import TryNext +from IPython.core.inputtransformer2 import ESC_MAGIC +from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol +from IPython.core.oinspect import InspectColors +from IPython.testing.skipdoctest import skip_doctest +from IPython.utils import generics +from IPython.utils.decorators import sphinx_options +from IPython.utils.dir2 import dir2, get_real_method +from IPython.utils.docs import GENERATING_DOCUMENTATION +from IPython.utils.path import ensure_dir_exists +from IPython.utils.process import arg_split +from traitlets import ( + Bool, + Enum, + Int, + List as ListTrait, + Unicode, + Dict as DictTrait, + Union as UnionTrait, + observe, +) +from traitlets.config.configurable import Configurable + +import __main__ + +# skip module docstests +__skip_doctest__ = True + + +try: + import jedi + jedi.settings.case_insensitive_completion = False + import jedi.api.helpers + import jedi.api.classes + JEDI_INSTALLED = True +except ImportError: + JEDI_INSTALLED = False + + +if TYPE_CHECKING or GENERATING_DOCUMENTATION and sys.version_info >= (3, 11): + from typing import cast + from typing_extensions import TypedDict, NotRequired, Protocol, TypeAlias, TypeGuard +else: + from typing import Generic + + def cast(type_, obj): + """Workaround for `TypeError: MatcherAPIv2() takes no arguments`""" + return obj + + # do not require on runtime + NotRequired = Tuple # requires Python >=3.11 + TypedDict = Dict # by extension of `NotRequired` requires 3.11 too + Protocol = object # requires Python >=3.8 + TypeAlias = Any # requires Python >=3.10 + TypeGuard = Generic # requires Python >=3.10 +if GENERATING_DOCUMENTATION: + from typing import TypedDict + +# ----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- + +# ranges where we have most of the valid unicode names. We could be more finer +# grained but is it worth it for performance While unicode have character in the +# range 0, 0x110000, we seem to have name for about 10% of those. (131808 as I +# write this). With below range we cover them all, with a density of ~67% +# biggest next gap we consider only adds up about 1% density and there are 600 +# gaps that would need hard coding. +_UNICODE_RANGES = [(32, 0x323B0), (0xE0001, 0xE01F0)] + +# Public API +__all__ = ["Completer", "IPCompleter"] + +if sys.platform == 'win32': + PROTECTABLES = ' ' +else: + PROTECTABLES = ' ()[]{}?=\\|;:\'#*"^&' + +# Protect against returning an enormous number of completions which the frontend +# may have trouble processing. 
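# --- Editor's aside (illustrative, not part of the vendored files) ----------
# The guard described in the comment above is a simple cap: matchers may
# yield unbounded candidate streams, so results are trimmed before being
# handed to a frontend. A minimal version of that pattern, with the limit
# value chosen here purely for illustration:

from itertools import islice

def bounded(candidates, limit=500):
    """Trim a possibly huge iterable of completions to a manageable size."""
    return list(islice(candidates, limit))

assert len(bounded(iter(range(10_000)))) == 500
# --- end aside ---------------------------------------------------------------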
+MATCHES_LIMIT = 500 + +# Completion type reported when no type can be inferred. +_UNKNOWN_TYPE = "" + +# sentinel value to signal lack of a match +not_found = object() + +class ProvisionalCompleterWarning(FutureWarning): + """ + Exception raise by an experimental feature in this module. + + Wrap code in :any:`provisionalcompleter` context manager if you + are certain you want to use an unstable feature. + """ + pass + +warnings.filterwarnings('error', category=ProvisionalCompleterWarning) + + +@skip_doctest +@contextmanager +def provisionalcompleter(action='ignore'): + """ + This context manager has to be used in any place where unstable completer + behavior and API may be called. + + >>> with provisionalcompleter(): + ... completer.do_experimental_things() # works + + >>> completer.do_experimental_things() # raises. + + .. note:: + + Unstable + + By using this context manager you agree that the API in use may change + without warning, and that you won't complain if they do so. + + You also understand that, if the API is not to your liking, you should report + a bug to explain your use case upstream. + + We'll be happy to get your feedback, feature requests, and improvements on + any of the unstable APIs! + """ + with warnings.catch_warnings(): + warnings.filterwarnings(action, category=ProvisionalCompleterWarning) + yield + + +def has_open_quotes(s): + """Return whether a string has open quotes. + + This simply counts whether the number of quote characters of either type in + the string is odd. + + Returns + ------- + If there is an open quote, the quote character is returned. Else, return + False. + """ + # We check " first, then ', so complex cases with nested quotes will get + # the " to take precedence. + if s.count('"') % 2: + return '"' + elif s.count("'") % 2: + return "'" + else: + return False + + +def protect_filename(s, protectables=PROTECTABLES): + """Escape a string to protect certain characters.""" + if set(s) & set(protectables): + if sys.platform == "win32": + return '"' + s + '"' + else: + return "".join(("\\" + c if c in protectables else c) for c in s) + else: + return s + + +def expand_user(path:str) -> Tuple[str, bool, str]: + """Expand ``~``-style usernames in strings. + + This is similar to :func:`os.path.expanduser`, but it computes and returns + extra information that will be useful if the input was being used in + computing completions, and you wish to return the completions with the + original '~' instead of its expanded value. + + Parameters + ---------- + path : str + String to be expanded. If no ~ is present, the output is the same as the + input. + + Returns + ------- + newpath : str + Result of ~ expansion in the input path. + tilde_expand : bool + Whether any expansion was performed or not. + tilde_val : str + The value that ~ was replaced with. + """ + # Default values + tilde_expand = False + tilde_val = '' + newpath = path + + if path.startswith('~'): + tilde_expand = True + rest = len(path)-1 + newpath = os.path.expanduser(path) + if rest: + tilde_val = newpath[:-rest] + else: + tilde_val = newpath + + return newpath, tilde_expand, tilde_val + + +def compress_user(path:str, tilde_expand:bool, tilde_val:str) -> str: + """Does the opposite of expand_user, with its outputs. 
+ """ + if tilde_expand: + return path.replace(tilde_val, '~') + else: + return path + + +def completions_sorting_key(word): + """key for sorting completions + + This does several things: + + - Demote any completions starting with underscores to the end + - Insert any %magic and %%cellmagic completions in the alphabetical order + by their name + """ + prio1, prio2 = 0, 0 + + if word.startswith('__'): + prio1 = 2 + elif word.startswith('_'): + prio1 = 1 + + if word.endswith('='): + prio1 = -1 + + if word.startswith('%%'): + # If there's another % in there, this is something else, so leave it alone + if not "%" in word[2:]: + word = word[2:] + prio2 = 2 + elif word.startswith('%'): + if not "%" in word[1:]: + word = word[1:] + prio2 = 1 + + return prio1, word, prio2 + + +class _FakeJediCompletion: + """ + This is a workaround to communicate to the UI that Jedi has crashed and to + report a bug. Will be used only id :any:`IPCompleter.debug` is set to true. + + Added in IPython 6.0 so should likely be removed for 7.0 + + """ + + def __init__(self, name): + + self.name = name + self.complete = name + self.type = 'crashed' + self.name_with_symbols = name + self.signature = "" + self._origin = "fake" + self.text = "crashed" + + def __repr__(self): + return '' + + +_JediCompletionLike = Union[jedi.api.Completion, _FakeJediCompletion] + + +class Completion: + """ + Completion object used and returned by IPython completers. + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + This act as a middle ground :any:`Completion` object between the + :any:`jedi.api.classes.Completion` object and the Prompt Toolkit completion + object. While Jedi need a lot of information about evaluator and how the + code should be ran/inspected, PromptToolkit (and other frontend) mostly + need user facing information. + + - Which range should be replaced replaced by what. + - Some metadata (like completion type), or meta information to displayed to + the use user. + + For debugging purpose we can also store the origin of the completion (``jedi``, + ``IPython.python_matches``, ``IPython.magics_matches``...). + """ + + __slots__ = ['start', 'end', 'text', 'type', 'signature', '_origin'] + + def __init__( + self, + start: int, + end: int, + text: str, + *, + type: Optional[str] = None, + _origin="", + signature="", + ) -> None: + warnings.warn( + "``Completion`` is a provisional API (as of IPython 6.0). " + "It may change without warnings. " + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, + stacklevel=2, + ) + + self.start = start + self.end = end + self.text = text + self.type = type + self.signature = signature + self._origin = _origin + + def __repr__(self): + return '' % \ + (self.start, self.end, self.text, self.type or '?', self.signature or '?') + + def __eq__(self, other) -> bool: + """ + Equality and hash do not hash the type (as some completer may not be + able to infer the type), but are use to (partially) de-duplicate + completion. + + Completely de-duplicating completion is a bit tricker that just + comparing as it depends on surrounding text, which Completions are not + aware of. + """ + return self.start == other.start and \ + self.end == other.end and \ + self.text == other.text + + def __hash__(self): + return hash((self.start, self.end, self.text)) + + +class SimpleCompletion: + """Completion item to be included in the dictionary returned by new-style Matcher (API v2). 
+
+    .. warning::
+
+        Provisional
+
+        This class is used to describe the currently supported attributes of
+        simple completion items, and any additional implementation details
+        should not be relied on. Additional attributes may be included in
+        future versions, and the meaning of text disambiguated from its current
+        dual meaning of "text to insert" and "text to use as a label".
+    """
+
+    __slots__ = ["text", "type"]
+
+    def __init__(self, text: str, *, type: Optional[str] = None):
+        self.text = text
+        self.type = type
+
+    def __repr__(self):
+        return f"<SimpleCompletion text={self.text!r} type={self.type!r}>"
+
+
+class _MatcherResultBase(TypedDict):
+    """Definition of dictionary to be returned by new-style Matcher (API v2)."""
+
+    #: Suffix of the provided ``CompletionContext.token``, if not given defaults to full token.
+    matched_fragment: NotRequired[str]
+
+    #: Whether to suppress results from all other matchers (True), some
+    #: matchers (set of identifiers) or none (False); default is False.
+    suppress: NotRequired[Union[bool, Set[str]]]
+
+    #: Identifiers of matchers which should NOT be suppressed when this matcher
+    #: requests to suppress all other matchers; defaults to an empty set.
+    do_not_suppress: NotRequired[Set[str]]
+
+    #: Are completions already ordered and should be left as-is? default is False.
+    ordered: NotRequired[bool]
+
+
+@sphinx_options(show_inherited_members=True, exclude_inherited_from=["dict"])
+class SimpleMatcherResult(_MatcherResultBase, TypedDict):
+    """Result of new-style completion matcher."""
+
+    # note: TypedDict is added again to the inheritance chain
+    # in order to get __orig_bases__ for documentation
+
+    #: List of candidate completions
+    completions: Sequence[SimpleCompletion] | Iterator[SimpleCompletion]
+
+
+class _JediMatcherResult(_MatcherResultBase):
+    """Matching result returned by Jedi (will be processed differently)"""
+
+    #: list of candidate completions
+    completions: Iterator[_JediCompletionLike]
+
+
+AnyMatcherCompletion = Union[_JediCompletionLike, SimpleCompletion]
+AnyCompletion = TypeVar("AnyCompletion", AnyMatcherCompletion, Completion)
+
+
+@dataclass
+class CompletionContext:
+    """Completion context provided as an argument to matchers in the Matcher API v2."""
+
+    # rationale: many legacy matchers relied on completer state (`self.text_until_cursor`)
+    # which was not explicitly visible as an argument of the matcher, making any refactor
+    # prone to errors; by explicitly passing `cursor_position` we can decouple the matchers
+    # from the completer, and make substituting them in sub-classes easier.
+
+    #: Relevant fragment of code directly preceding the cursor.
+    #: The extraction of token is implemented via splitter heuristic
+    #: (following readline behaviour for legacy reasons), which is user configurable
+    #: (by switching the greedy mode).
+    token: str
+
+    #: The full available content of the editor or buffer
+    full_text: str
+
+    #: Cursor position in the line (the same for ``full_text`` and ``text``).
+    cursor_position: int
+
+    #: Cursor line in ``full_text``.
+    cursor_line: int
+
+    #: The maximum number of completions that will be used downstream.
+    #: Matchers can use this information to abort early.
+    #: The built-in Jedi matcher is currently excepted from this limit.
+    # If not given, return all possible completions.
+ limit: Optional[int] + + @cached_property + def text_until_cursor(self) -> str: + return self.line_with_cursor[: self.cursor_position] + + @cached_property + def line_with_cursor(self) -> str: + return self.full_text.split("\n")[self.cursor_line] + + +#: Matcher results for API v2. +MatcherResult = Union[SimpleMatcherResult, _JediMatcherResult] + + +class _MatcherAPIv1Base(Protocol): + def __call__(self, text: str) -> List[str]: + """Call signature.""" + ... + + #: Used to construct the default matcher identifier + __qualname__: str + + +class _MatcherAPIv1Total(_MatcherAPIv1Base, Protocol): + #: API version + matcher_api_version: Optional[Literal[1]] + + def __call__(self, text: str) -> List[str]: + """Call signature.""" + ... + + +#: Protocol describing Matcher API v1. +MatcherAPIv1: TypeAlias = Union[_MatcherAPIv1Base, _MatcherAPIv1Total] + + +class MatcherAPIv2(Protocol): + """Protocol describing Matcher API v2.""" + + #: API version + matcher_api_version: Literal[2] = 2 + + def __call__(self, context: CompletionContext) -> MatcherResult: + """Call signature.""" + ... + + #: Used to construct the default matcher identifier + __qualname__: str + + +Matcher: TypeAlias = Union[MatcherAPIv1, MatcherAPIv2] + + +def _is_matcher_v1(matcher: Matcher) -> TypeGuard[MatcherAPIv1]: + api_version = _get_matcher_api_version(matcher) + return api_version == 1 + + +def _is_matcher_v2(matcher: Matcher) -> TypeGuard[MatcherAPIv2]: + api_version = _get_matcher_api_version(matcher) + return api_version == 2 + + +def _is_sizable(value: Any) -> TypeGuard[Sized]: + """Determines whether objects is sizable""" + return hasattr(value, "__len__") + + +def _is_iterator(value: Any) -> TypeGuard[Iterator]: + """Determines whether objects is sizable""" + return hasattr(value, "__next__") + + +def has_any_completions(result: MatcherResult) -> bool: + """Check if any result includes any completions.""" + completions = result["completions"] + if _is_sizable(completions): + return len(completions) != 0 + if _is_iterator(completions): + try: + old_iterator = completions + first = next(old_iterator) + result["completions"] = cast( + Iterator[SimpleCompletion], + itertools.chain([first], old_iterator), + ) + return True + except StopIteration: + return False + raise ValueError( + "Completions returned by matcher need to be an Iterator or a Sizable" + ) + + +def completion_matcher( + *, + priority: Optional[float] = None, + identifier: Optional[str] = None, + api_version: int = 1, +): + """Adds attributes describing the matcher. + + Parameters + ---------- + priority : Optional[float] + The priority of the matcher, determines the order of execution of matchers. + Higher priority means that the matcher will be executed first. Defaults to 0. + identifier : Optional[str] + identifier of the matcher allowing users to modify the behaviour via traitlets, + and also used to for debugging (will be passed as ``origin`` with the completions). + + Defaults to matcher function's ``__qualname__`` (for example, + ``IPCompleter.file_matcher`` for the built-in matched defined + as a ``file_matcher`` method of the ``IPCompleter`` class). + api_version: Optional[int] + version of the Matcher API used by this matcher. + Currently supported values are 1 and 2. + Defaults to 1. 
+ """ + + def wrapper(func: Matcher): + func.matcher_priority = priority or 0 # type: ignore + func.matcher_identifier = identifier or func.__qualname__ # type: ignore + func.matcher_api_version = api_version # type: ignore + if TYPE_CHECKING: + if api_version == 1: + func = cast(MatcherAPIv1, func) + elif api_version == 2: + func = cast(MatcherAPIv2, func) + return func + + return wrapper + + +def _get_matcher_priority(matcher: Matcher): + return getattr(matcher, "matcher_priority", 0) + + +def _get_matcher_id(matcher: Matcher): + return getattr(matcher, "matcher_identifier", matcher.__qualname__) + + +def _get_matcher_api_version(matcher): + return getattr(matcher, "matcher_api_version", 1) + + +context_matcher = partial(completion_matcher, api_version=2) + + +_IC = Iterable[Completion] + + +def _deduplicate_completions(text: str, completions: _IC)-> _IC: + """ + Deduplicate a set of completions. + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + + Parameters + ---------- + text : str + text that should be completed. + completions : Iterator[Completion] + iterator over the completions to deduplicate + + Yields + ------ + `Completions` objects + Completions coming from multiple sources, may be different but end up having + the same effect when applied to ``text``. If this is the case, this will + consider completions as equal and only emit the first encountered. + Not folded in `completions()` yet for debugging purpose, and to detect when + the IPython completer does return things that Jedi does not, but should be + at some point. + """ + completions = list(completions) + if not completions: + return + + new_start = min(c.start for c in completions) + new_end = max(c.end for c in completions) + + seen = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if new_text not in seen: + yield c + seen.add(new_text) + + +def rectify_completions(text: str, completions: _IC, *, _debug: bool = False) -> _IC: + """ + Rectify a set of completions to all have the same ``start`` and ``end`` + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. + It will also raise unless use in proper context manager. + + Parameters + ---------- + text : str + text that should be completed. + completions : Iterator[Completion] + iterator over the completions to rectify + _debug : bool + Log failed completion + + Notes + ----- + :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though + the Jupyter Protocol requires them to behave like so. This will readjust + the completion to have the same ``start`` and ``end`` by padding both + extremities with surrounding text. + + During stabilisation should support a ``_debug`` option to log which + completion are return by the IPython completer and not found in Jedi in + order to make upstream bug report. + """ + warnings.warn("`rectify_completions` is a provisional API (as of IPython 6.0). " + "It may change without warnings. 
" + "Use in corresponding context manager.", + category=ProvisionalCompleterWarning, stacklevel=2) + + completions = list(completions) + if not completions: + return + starts = (c.start for c in completions) + ends = (c.end for c in completions) + + new_start = min(starts) + new_end = max(ends) + + seen_jedi = set() + seen_python_matches = set() + for c in completions: + new_text = text[new_start:c.start] + c.text + text[c.end:new_end] + if c._origin == 'jedi': + seen_jedi.add(new_text) + elif c._origin == 'IPCompleter.python_matches': + seen_python_matches.add(new_text) + yield Completion(new_start, new_end, new_text, type=c.type, _origin=c._origin, signature=c.signature) + diff = seen_python_matches.difference(seen_jedi) + if diff and _debug: + print('IPython.python matches have extras:', diff) + + +if sys.platform == 'win32': + DELIMS = ' \t\n`!@#$^&*()=+[{]}|;\'",<>?' +else: + DELIMS = ' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?' + +GREEDY_DELIMS = ' =\r\n' + + +class CompletionSplitter(object): + """An object to split an input line in a manner similar to readline. + + By having our own implementation, we can expose readline-like completion in + a uniform manner to all frontends. This object only needs to be given the + line of text to be split and the cursor position on said line, and it + returns the 'word' to be completed on at the cursor after splitting the + entire line. + + What characters are used as splitting delimiters can be controlled by + setting the ``delims`` attribute (this is a property that internally + automatically builds the necessary regular expression)""" + + # Private interface + + # A string of delimiter characters. The default value makes sense for + # IPython's most typical usage patterns. + _delims = DELIMS + + # The expression (a normal string) to be compiled into a regular expression + # for actual splitting. We store it as an attribute mostly for ease of + # debugging, since this type of code can be so tricky to debug. + _delim_expr = None + + # The regular expression that does the actual splitting + _delim_re = None + + def __init__(self, delims=None): + delims = CompletionSplitter._delims if delims is None else delims + self.delims = delims + + @property + def delims(self): + """Return the string of delimiter characters.""" + return self._delims + + @delims.setter + def delims(self, delims): + """Set the delimiters for line splitting.""" + expr = '[' + ''.join('\\'+ c for c in delims) + ']' + self._delim_re = re.compile(expr) + self._delims = delims + self._delim_expr = expr + + def split_line(self, line, cursor_pos=None): + """Split a line of text with a cursor at the given position. + """ + l = line if cursor_pos is None else line[:cursor_pos] + return self._delim_re.split(l)[-1] + + + +class Completer(Configurable): + + greedy = Bool( + False, + help="""Activate greedy completion. + + .. deprecated:: 8.8 + Use :std:configtrait:`Completer.evaluation` and :std:configtrait:`Completer.auto_close_dict_keys` instead. + + When enabled in IPython 8.8 or newer, changes configuration as follows: + + - ``Completer.evaluation = 'unsafe'`` + - ``Completer.auto_close_dict_keys = True`` + """, + ).tag(config=True) + + evaluation = Enum( + ("forbidden", "minimal", "limited", "unsafe", "dangerous"), + default_value="limited", + help="""Policy for code evaluation under completion. + + Successive options allow to enable more eager evaluation for better + completion suggestions, including for nested dictionaries, nested lists, + or even results of function calls. 
+        Setting ``unsafe`` or higher can lead to evaluation of arbitrary user
+        code on :kbd:`Tab` with potentially unwanted or dangerous side effects.
+
+        Allowed values are:
+
+        - ``forbidden``: no evaluation of code is permitted,
+        - ``minimal``: evaluation of literals and access to built-in namespace;
+          no item/attribute evaluation, no access to locals/globals,
+          no evaluation of any operations or comparisons.
+        - ``limited``: access to all namespaces, evaluation of hard-coded methods
+          (for example: :any:`dict.keys`, :any:`object.__getattr__`,
+          :any:`object.__getitem__`) on allow-listed objects (for example:
+          :any:`dict`, :any:`list`, :any:`tuple`, ``pandas.Series``),
+        - ``unsafe``: evaluation of all methods and function calls but not of
+          syntax with side-effects like `del x`,
+        - ``dangerous``: completely arbitrary evaluation.
+        """,
+    ).tag(config=True)
+
+    use_jedi = Bool(default_value=JEDI_INSTALLED,
+        help="Experimental: Use Jedi to generate autocompletions. "
+             "Defaults to True if jedi is installed.").tag(config=True)
+
+    jedi_compute_type_timeout = Int(default_value=400,
+        help="""Experimental: restrict time (in milliseconds) during which Jedi can compute types.
+        Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt
+        performance by preventing jedi from building its cache.
+        """).tag(config=True)
+
+    debug = Bool(default_value=False,
+                 help='Enable debug for the Completer. Mostly print extra '
+                      'information for experimental jedi integration.')\
+                      .tag(config=True)
+
+    backslash_combining_completions = Bool(True,
+        help="Enable unicode completions, e.g. \\alpha . "
+             "Includes completion of latex commands, unicode names, and expanding "
+             "unicode characters back to latex commands.").tag(config=True)
+
+    auto_close_dict_keys = Bool(
+        False,
+        help="""
+        Enable auto-closing dictionary keys.
+
+        When enabled string keys will be suffixed with a final quote
+        (matching the opening quote), tuple keys will also receive a
+        separating comma if needed, and keys which are final will
+        receive a closing bracket (``]``).
+        """,
+    ).tag(config=True)
+
+    def __init__(self, namespace=None, global_namespace=None, **kwargs):
+        """Create a new completer for the command line.
+
+        Completer(namespace=ns, global_namespace=ns2) -> completer instance.
+
+        If unspecified, the default namespace where completions are performed
+        is __main__ (technically, __main__.__dict__). Namespaces should be
+        given as dictionaries.
+
+        An optional second namespace can be given. This allows the completer
+        to handle cases where both the local and global scopes need to be
+        distinguished.
+        """
+
+        # Don't bind to namespace quite yet, but flag whether the user wants a
+        # specific namespace or to use __main__.__dict__. This will allow us
+        # to bind to __main__.__dict__ at completion time, not now.
+        if namespace is None:
+            self.use_main_ns = True
+        else:
+            self.use_main_ns = False
+            self.namespace = namespace
+
+        # The global namespace, if given, can be bound directly
+        if global_namespace is None:
+            self.global_namespace = {}
+        else:
+            self.global_namespace = global_namespace
+
+        self.custom_matchers = []
+
+        super(Completer, self).__init__(**kwargs)
+
+    def complete(self, text, state):
+        """Return the next possible completion for 'text'.
+
+        This is called successively with state == 0, 1, 2, ... until it
+        returns None.  The completion should begin with 'text'.
+
+        """
+        if self.use_main_ns:
+            self.namespace = __main__.__dict__
+
+        if state == 0:
+            if "." 
in text: + self.matches = self.attr_matches(text) + else: + self.matches = self.global_matches(text) + try: + return self.matches[state] + except IndexError: + return None + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names currently + defined in self.namespace or self.global_namespace that match. + + """ + matches = [] + match_append = matches.append + n = len(text) + for lst in [ + keyword.kwlist, + builtin_mod.__dict__.keys(), + list(self.namespace.keys()), + list(self.global_namespace.keys()), + ]: + for word in lst: + if word[:n] == text and word != "__builtins__": + match_append(word) + + snake_case_re = re.compile(r"[^_]+(_[^_]+)+?\Z") + for lst in [list(self.namespace.keys()), list(self.global_namespace.keys())]: + shortened = { + "_".join([sub[0] for sub in word.split("_")]): word + for word in lst + if snake_case_re.match(word) + } + for word in shortened.keys(): + if word[:n] == text and word != "__builtins__": + match_append(shortened[word]) + return matches + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace or self.global_namespace, it will be + evaluated and its attributes (as revealed by dir()) are used as + possible completions. (For class instances, class members are + also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + m2 = re.match(r"(.+)\.(\w*)$", self.line_buffer) + if not m2: + return [] + expr, attr = m2.group(1, 2) + + obj = self._evaluate_expr(expr) + + if obj is not_found: + return [] + + if self.limit_to__all__ and hasattr(obj, '__all__'): + words = get__all__entries(obj) + else: + words = dir2(obj) + + try: + words = generics.complete_object(obj, words) + except TryNext: + pass + except AssertionError: + raise + except Exception: + # Silence errors from completion function + #raise # dbg + pass + # Build match list to return + n = len(attr) + return ["%s.%s" % (expr, w) for w in words if w[:n] == attr] + + def _evaluate_expr(self, expr): + obj = not_found + done = False + while not done and expr: + try: + obj = guarded_eval( + expr, + EvaluationContext( + globals=self.global_namespace, + locals=self.namespace, + evaluation=self.evaluation, + ), + ) + done = True + except Exception as e: + if self.debug: + print("Evaluation exception", e) + # trim the expression to remove any invalid prefix + # e.g. user starts `(d[`, so we get `expr = '(d'`, + # where parenthesis is not closed. + # TODO: make this faster by reusing parts of the computation? + expr = expr[1:] + return obj + +def get__all__entries(obj): + """returns the strings in the __all__ attribute""" + try: + words = getattr(obj, '__all__') + except: + return [] + + return [w for w in words if isinstance(w, str)] + + +class _DictKeyState(enum.Flag): + """Represent state of the key match in context of other possible matches. + + - given `d1 = {'a': 1}` completion on `d1['` will yield `{'a': END_OF_ITEM}` as there is no tuple. + - given `d2 = {('a', 'b'): 1}`: `d2['a', '` will yield `{'b': END_OF_TUPLE}` as there is no tuple members to add beyond `'b'`. + - given `d3 = {('a', 'b'): 1}`: `d3['` will yield `{'a': IN_TUPLE}` as `'a'` can be added. 
+    - given `d4 = {'a': 1, ('a', 'b'): 2}`: `d4['` will yield `{'a': END_OF_ITEM | END_OF_TUPLE}`
+    """
+
+    BASELINE = 0
+    END_OF_ITEM = enum.auto()
+    END_OF_TUPLE = enum.auto()
+    IN_TUPLE = enum.auto()
+
+
+def _parse_tokens(c):
+    """Parse tokens even if there is an error."""
+    tokens = []
+    token_generator = tokenize.generate_tokens(iter(c.splitlines()).__next__)
+    while True:
+        try:
+            tokens.append(next(token_generator))
+        except tokenize.TokenError:
+            return tokens
+        except StopIteration:
+            return tokens
+
+
+def _match_number_in_dict_key_prefix(prefix: str) -> Union[str, None]:
+    """Match any valid Python numeric literal in a prefix of dictionary keys.
+
+    References:
+    - https://docs.python.org/3/reference/lexical_analysis.html#numeric-literals
+    - https://docs.python.org/3/library/tokenize.html
+    """
+    if prefix[-1].isspace():
+        # if user typed a space we do not have anything to complete
+        # even if there was a valid number token before
+        return None
+    tokens = _parse_tokens(prefix)
+    rev_tokens = reversed(tokens)
+    skip_over = {tokenize.ENDMARKER, tokenize.NEWLINE}
+    number = None
+    for token in rev_tokens:
+        if token.type in skip_over:
+            continue
+        if number is None:
+            if token.type == tokenize.NUMBER:
+                number = token.string
+                continue
+            else:
+                # we did not match a number
+                return None
+        if token.type == tokenize.OP:
+            if token.string == ",":
+                break
+            if token.string in {"+", "-"}:
+                number = token.string + number
+        else:
+            return None
+    return number
+
+
+_INT_FORMATS = {
+    "0b": bin,
+    "0o": oct,
+    "0x": hex,
+}
+
+
+def match_dict_keys(
+    keys: List[Union[str, bytes, Tuple[Union[str, bytes], ...]]],
+    prefix: str,
+    delims: str,
+    extra_prefix: Optional[Tuple[Union[str, bytes], ...]] = None,
+) -> Tuple[str, int, Dict[str, _DictKeyState]]:
+    """Used by dict_key_matches, matching the prefix to a list of keys
+
+    Parameters
+    ----------
+    keys
+        list of keys in dictionary currently being completed.
+    prefix
+        Part of the text already typed by the user. E.g. `mydict[b'fo`
+    delims
+        String of delimiters to consider when finding the current key.
+    extra_prefix : optional
+        Part of the text already typed in multi-key index cases. E.g. for
+        `mydict['foo', "bar", 'b`, this would be `('foo', 'bar')`.
+
+    Returns
+    -------
+    A tuple of three elements: ``quote``, ``token_start``, ``matched``, with
+    ``quote`` being the quote that needs to be used to close the current string,
+    ``token_start`` the position where the replacement should start occurring,
+    and ``matched`` a dictionary mapping each replacement/completion key to a
+    value indicating its match state.
+    """
+    prefix_tuple = extra_prefix if extra_prefix else ()
+
+    prefix_tuple_size = sum(
+        [
+            # for pandas, do not count slices as taking space
+            not isinstance(k, slice)
+            for k in prefix_tuple
+        ]
+    )
+    text_serializable_types = (str, bytes, int, float, slice)
+
+    def filter_prefix_tuple(key):
+        # Reject too short keys
+        if len(key) <= prefix_tuple_size:
+            return False
+        # Reject keys which cannot be serialised to text
+        for k in key:
+            if not isinstance(k, text_serializable_types):
+                return False
+        # Reject keys that do not match the prefix
+        for k, pt in zip(key, prefix_tuple):
+            if k != pt and not isinstance(pt, slice):
+                return False
+        # All checks passed!
+        return True
+
+    filtered_key_is_final: Dict[
+        Union[str, bytes, int, float], _DictKeyState
+    ] = defaultdict(lambda: _DictKeyState.BASELINE)
+
+    for k in keys:
+        # If at least one of the matches is not final, mark as undetermined.
+        # This can happen with `d = {111: 'b', (111, 222): 'a'}` where
+        # `111` appears final on first match but is not final on the second.
+
+        if isinstance(k, tuple):
+            if filter_prefix_tuple(k):
+                key_fragment = k[prefix_tuple_size]
+                filtered_key_is_final[key_fragment] |= (
+                    _DictKeyState.END_OF_TUPLE
+                    if len(k) == prefix_tuple_size + 1
+                    else _DictKeyState.IN_TUPLE
+                )
+        elif prefix_tuple_size > 0:
+            # we are completing a tuple but this key is not a tuple,
+            # so we should ignore it
+            pass
+        else:
+            if isinstance(k, text_serializable_types):
+                filtered_key_is_final[k] |= _DictKeyState.END_OF_ITEM
+
+    filtered_keys = filtered_key_is_final.keys()
+
+    if not prefix:
+        return "", 0, {repr(k): v for k, v in filtered_key_is_final.items()}
+
+    quote_match = re.search("(?:\"|')", prefix)
+    is_user_prefix_numeric = False
+
+    if quote_match:
+        quote = quote_match.group()
+        valid_prefix = prefix + quote
+        try:
+            prefix_str = literal_eval(valid_prefix)
+        except Exception:
+            return "", 0, {}
+    else:
+        # If it does not look like a string, let's assume
+        # we are dealing with a number or variable.
+        number_match = _match_number_in_dict_key_prefix(prefix)
+
+        # We do not want the key matcher to suggest variable names so we yield:
+        if number_match is None:
+            # The alternative would be to assume that user forgot the quote
+            # and if the substring matches, suggest adding it at the start.
+            return "", 0, {}
+
+        prefix_str = number_match
+        is_user_prefix_numeric = True
+        quote = ""
+
+    pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$'
+    token_match = re.search(pattern, prefix, re.UNICODE)
+    assert token_match is not None  # silence mypy
+    token_start = token_match.start()
+    token_prefix = token_match.group()
+
+    matched: Dict[str, _DictKeyState] = {}
+
+    str_key: Union[str, bytes]
+
+    for key in filtered_keys:
+        if isinstance(key, (int, float)):
+            # User typed a string but this key is a number.
+            if not is_user_prefix_numeric:
+                continue
+            str_key = str(key)
+            if isinstance(key, int):
+                int_base = prefix_str[:2].lower()
+                # if user typed integer using binary/oct/hex notation:
+                if int_base in _INT_FORMATS:
+                    int_format = _INT_FORMATS[int_base]
+                    str_key = int_format(key)
+        else:
+            # User typed a number but this key is not a number.
+            if is_user_prefix_numeric:
+                continue
+            str_key = key
+        try:
+            if not str_key.startswith(prefix_str):
+                continue
+        except (AttributeError, TypeError, UnicodeError):
+            # Python 3+ TypeError on b'a'.startswith('a') or vice-versa
+            continue
+
+        # reformat remainder of key to begin with prefix
+        rem = str_key[len(prefix_str):]
+        # force repr wrapped in '
+        rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"')
+        rem_repr = rem_repr[1 + rem_repr.index("'"):-2]
+        if quote == '"':
+            # The entered prefix is quoted with ",
+            # but the match is quoted with '.
+            # A contained " hence needs escaping for comparison:
+            rem_repr = rem_repr.replace('"', '\\"')
+
+        # then reinsert prefix from start of token
+        match = "%s%s" % (token_prefix, rem_repr)
+
+        matched[match] = filtered_key_is_final[key]
+    return quote, token_start, matched
+
+
+def cursor_to_position(text: str, line: int, column: int) -> int:
+    """
+    Convert the (line, column) position of the cursor in text to an offset in
+    a string.
+
+    Parameters
+    ----------
+    text : str
+        The text in which to calculate the cursor offset
+    line : int
+        Line of the cursor; 0-indexed
+    column : int
+        Column of the cursor 0-indexed
+
+    Returns
+    -------
+    Position of the cursor in ``text``, 0-indexed.
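+
+    For example (illustrative only)::
+
+        >>> cursor_to_position('ab' + chr(10) + 'cd', 1, 1)
+        4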
+
+    See Also
+    --------
+    position_to_cursor : reciprocal of this function
+
+    """
+    lines = text.split('\n')
+    assert line <= len(lines), '{} <= {}'.format(str(line), str(len(lines)))
+
+    return sum(len(l) + 1 for l in lines[:line]) + column
+
+
+def position_to_cursor(text: str, offset: int) -> Tuple[int, int]:
+    """
+    Convert the position of the cursor in text (0 indexed) to a line
+    number (0-indexed) and a column number (0-indexed) pair
+
+    Position should be a valid position in ``text``.
+
+    Parameters
+    ----------
+    text : str
+        The text in which to calculate the cursor offset
+    offset : int
+        Position of the cursor in ``text``, 0-indexed.
+
+    Returns
+    -------
+    (line, column) : (int, int)
+        Line of the cursor; 0-indexed, column of the cursor 0-indexed
+
+    See Also
+    --------
+    cursor_to_position : reciprocal of this function
+
+    """
+
+    assert 0 <= offset <= len(text), "0 <= %s <= %s" % (offset, len(text))
+
+    before = text[:offset]
+    blines = before.split('\n')  # ! splitlines would trim a trailing \n
+    line = before.count('\n')
+    col = len(blines[-1])
+    return line, col
+
+
+def _safe_isinstance(obj, module, class_name, *attrs):
+    """Checks if obj is an instance of module.class_name if loaded
+    """
+    if module in sys.modules:
+        m = sys.modules[module]
+        for attr in [class_name, *attrs]:
+            m = getattr(m, attr)
+        return isinstance(obj, m)
+
+
+@context_matcher()
+def back_unicode_name_matcher(context: CompletionContext):
+    """Match Unicode characters back to Unicode name
+
+    Same as :any:`back_unicode_name_matches`, but adopted to new Matcher API.
+    """
+    fragment, matches = back_unicode_name_matches(context.text_until_cursor)
+    return _convert_matcher_v1_result_to_v2(
+        matches, type="unicode", fragment=fragment, suppress_if_matches=True
+    )
+
+
+def back_unicode_name_matches(text: str) -> Tuple[str, Sequence[str]]:
+    """Match Unicode characters back to Unicode name
+
+    This does ``☃`` -> ``\\snowman``
+
+    Note that snowman is not a valid python3 combining character but will be expanded.
+    Though it will not recombine back to the snowman character by the completion machinery.
+
+    Nor will this back-complete standard sequences like \\n, \\b ...
+
+    .. deprecated:: 8.6
+        You can use :meth:`back_unicode_name_matcher` instead.
+
+    Returns
+    -------
+    Return a tuple with two elements:
+
+    - The Unicode character that was matched (preceded with a backslash), or
+      empty string,
+    - a sequence (of 1), the name of the matched Unicode character, preceded
+      by backslash, or empty if no match.
+    """
+    if len(text) < 2:
+        return '', ()
+    maybe_slash = text[-2]
+    if maybe_slash != '\\':
+        return '', ()
+
+    char = text[-1]
+    # no expand on quote for completion in strings.
+    # nor backcomplete standard ascii keys
+    if char in string.ascii_letters or char in ('"', "'"):
+        return '', ()
+    try:
+        unic = unicodedata.name(char)
+        return '\\' + char, ('\\' + unic,)
+    except KeyError:
+        pass
+    return '', ()
+
+
+@context_matcher()
+def back_latex_name_matcher(context: CompletionContext):
+    """Match latex characters back to unicode name
+
+    Same as :any:`back_latex_name_matches`, but adopted to new Matcher API.
+    """
+    fragment, matches = back_latex_name_matches(context.text_until_cursor)
+    return _convert_matcher_v1_result_to_v2(
+        matches, type="latex", fragment=fragment, suppress_if_matches=True
+    )
+
+
+def back_latex_name_matches(text: str) -> Tuple[str, Sequence[str]]:
+    """Match latex characters back to unicode name
+
+    This does ``\\ℵ`` -> ``\\aleph``
+
+    .. 
deprecated:: 8.6
+        You can use :meth:`back_latex_name_matcher` instead.
+    """
+    if len(text) < 2:
+        return '', ()
+    maybe_slash = text[-2]
+    if maybe_slash != '\\':
+        return '', ()
+
+    char = text[-1]
+    # no expand on quote for completion in strings.
+    # nor backcomplete standard ascii keys
+    if char in string.ascii_letters or char in ('"', "'"):
+        return '', ()
+    try:
+        latex = reverse_latex_symbol[char]
+        # '\\' replace the \ as well
+        return '\\' + char, [latex]
+    except KeyError:
+        pass
+    return '', ()
+
+
+def _formatparamchildren(parameter) -> str:
+    """
+    Get parameter name and value from Jedi Private API
+
+    Jedi does not expose a simple way to get `param=value` from its API.
+
+    Parameters
+    ----------
+    parameter
+        Jedi's function `Param`
+
+    Returns
+    -------
+    A string like 'a', 'b=1', '*args', '**kwargs'
+
+    """
+    description = parameter.description
+    if not description.startswith('param '):
+        raise ValueError('Jedi function parameter description has changed format. '
+                         'Expected "param ...", found %r.' % description)
+    return description[6:]
+
+
+def _make_signature(completion) -> str:
+    """
+    Make the signature from a jedi completion
+
+    Parameters
+    ----------
+    completion : jedi.Completion
+        object that completes a function type
+
+    Returns
+    -------
+    a string consisting of the function signature, with the parenthesis but
+    without the function name. example:
+    `(a, *args, b=1, **kwargs)`
+
+    """
+
+    # it looks like this might work on jedi 0.17
+    if hasattr(completion, 'get_signatures'):
+        signatures = completion.get_signatures()
+        if not signatures:
+            return '(?)'
+
+        c0 = completion.get_signatures()[0]
+        return '(' + c0.to_string().split('(', maxsplit=1)[1]
+
+    return '(%s)' % ', '.join([f for f in (_formatparamchildren(p)
+                                           for signature in completion.get_signatures()
+                                           for p in signature.defined_names()) if f])
+
+
+_CompleteResult = Dict[str, MatcherResult]
+
+
+DICT_MATCHER_REGEX = re.compile(
+    r"""(?x)
+(  # match dict-referring - or any get item object - expression
+    .+
+)
+\[   # open bracket
+\s*  # and optional whitespace
+# Capture any number of serializable objects (e.g. "a", "b", 'c')
+# and slices
+((?:(?:
+    (?:  # closed string
+        [uUbB]?  # string prefix (r not handled)
+        (?:
+            '(?:[^']|(?<!\\)\\')*'
+            |
+            "(?:[^"]|(?<!\\)\\")*"
+        )
+    )
+    |
+    # capture integers and slices
+    (?:[-+eE0-9.]+)
+    |
+    # integer in bin/hex/oct notation
+    (?:0[bBxXoO]_?(?:\w|\d)+)
+    )
+    \s*,\s*
+)*)
+((?:
+    (?:  # unclosed string
+        [uUbB]?  # string prefix (r not handled)
+        (?:
+            '(?:[^']|(?<!\\)\\')*
+            |
+            "(?:[^"]|(?<!\\)\\")*
+        )
+    )
+    |
+    # unfinished integer
+    (?:[-+eE0-9.]+)
+    |
+    # integer in bin/hex/oct notation
+    (?:0[bBxXoO]_?(?:\w|\d)*)
+    )
+)?
+$
+"""
+)
+
+
+def _convert_matcher_v1_result_to_v2(
+    matches: Sequence[str],
+    type: str,
+    fragment: Optional[str] = None,
+    suppress_if_matches: bool = False,
+) -> SimpleMatcherResult:
+    """Utility to help with transition"""
+    result = {
+        "completions": [SimpleCompletion(text=match, type=type) for match in matches],
+        "suppress": (True if matches else False) if suppress_if_matches else False,
+    }
+    if fragment is not None:
+        result["matched_fragment"] = fragment
+    return cast(SimpleMatcherResult, result)
+
+
+class IPCompleter(Completer):
+    """Extension of the completer class with IPython-specific features"""
+
+    @observe('greedy')
+    def _greedy_changed(self, change):
+        """update the splitter and readline delims when greedy is changed"""
+        if change["new"]:
+            self.evaluation = "unsafe"
+            self.auto_close_dict_keys = True
+            self.splitter.delims = GREEDY_DELIMS
+        else:
+            self.evaluation = "limited"
+            self.auto_close_dict_keys = False
+            self.splitter.delims = DELIMS
+
+    dict_keys_only = Bool(
+        False,
+        help="""
+        Whether to show dict key matches only.
+
+        (disables all matchers except for `IPCompleter.dict_key_matcher`).
+        """,
+    )
+
+    suppress_competing_matchers = UnionTrait(
+        [Bool(allow_none=True), DictTrait(Bool(None, allow_none=True))],
+        default_value=None,
+        help="""
+        Whether to suppress completions from other *Matchers*.
+
+        When set to ``None`` (default) the matchers will attempt to auto-detect
+        whether suppression of other matchers is desirable. For example, at
+        the beginning of a line followed by `%` we expect a magic completion
+        to be the only applicable option, and after ``my_dict['`` we usually
+        expect a completion with an existing dictionary key.
+
+        If you want to disable this heuristic and see completions from all matchers,
+        set ``IPCompleter.suppress_competing_matchers = False``.
+        To disable the heuristic for specific matchers provide a dictionary mapping:
+        ``IPCompleter.suppress_competing_matchers = {'IPCompleter.dict_key_matcher': False}``.
+
+        Set ``IPCompleter.suppress_competing_matchers = True`` to limit
+        completions to the set of matchers with the highest priority;
+        this is equivalent to ``IPCompleter.merge_completions`` and
+        can be beneficial for performance, but will sometimes omit relevant
+        candidates from matchers further down the priority list.
+        """,
+    ).tag(config=True)
+
+    merge_completions = Bool(
+        True,
+        help="""Whether to merge completion results into a single list
+
+        If False, only the completion results from the first non-empty
+        completer will be returned.
+
+        As of version 8.6.0, setting the value to ``False`` is an alias for:
+        ``IPCompleter.suppress_competing_matchers = True``.
+        """,
+    ).tag(config=True)
+
+    disable_matchers = ListTrait(
+        Unicode(),
+        help="""List of matchers to disable.
+
+        The list should contain matcher identifiers (see :any:`completion_matcher`).
+        """,
+    ).tag(config=True)
+
+    omit__names = Enum(
+        (0, 1, 2),
+        default_value=2,
+        help="""Instruct the completer to omit private method names
+
+        Specifically, when completing on ``object.``.
+
+        When 2 [default]: all names that start with '_' will be excluded.
+
+        When 1: all 'magic' names (``__foo__``) will be excluded.
+
+        When 0: nothing will be excluded.
+        """
+    ).tag(config=True)
+    limit_to__all__ = Bool(False,
+        help="""
+        DEPRECATED as of version 5.0.
+
+        Instruct the completer to use __all__ for the completion
+
+        Specifically, when completing on ``object.``.
+
+        When True: only those names in obj.__all__ will be included.
+
+        When False [default]: the __all__ attribute is ignored
+        """,
+    ).tag(config=True)
+
+    profile_completions = Bool(
+        default_value=False,
+        help="If True, emit profiling data for completion subsystem using cProfile."
+    ).tag(config=True)
+
+    profiler_output_dir = Unicode(
+        default_value=".completion_profiles",
+        help="Template for path at which to output profile data for completions."
+    ).tag(config=True)
+
+    @observe('limit_to__all__')
+    def _limit_to_all_changed(self, change):
+        warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration '
+                      'value has been deprecated since IPython 5.0, will be made to have '
+                      'no effect, and will then be removed in a future version of IPython.',
+                      UserWarning)
+
+    def __init__(
+        self, shell=None, namespace=None, global_namespace=None, config=None, **kwargs
+    ):
+        """IPCompleter() -> completer
+
+        Return a completer object.
+
+        Parameters
+        ----------
+        shell
+            a pointer to the ipython shell itself.  This is needed
+            because this completer knows about magic functions, and those can
+            only be accessed via the ipython instance.
+        namespace : dict, optional
+            an optional dict where completions are performed.
+        global_namespace : dict, optional
+            secondary optional dict for completions, to
+            handle cases (such as IPython embedded inside functions) where
+            both Python scopes are visible.
+ config : Config + traitlet's config object + **kwargs + passed to super class unmodified. + """ + + self.magic_escape = ESC_MAGIC + self.splitter = CompletionSplitter() + + # _greedy_changed() depends on splitter and readline being defined: + super().__init__( + namespace=namespace, + global_namespace=global_namespace, + config=config, + **kwargs, + ) + + # List where completion matches will be stored + self.matches = [] + self.shell = shell + # Regexp to split filenames with spaces in them + self.space_name_re = re.compile(r'([^\\] )') + # Hold a local ref. to glob.glob for speed + self.glob = glob.glob + + # Determine if we are running on 'dumb' terminals, like (X)Emacs + # buffers, to avoid completion problems. + term = os.environ.get('TERM','xterm') + self.dumb_terminal = term in ['dumb','emacs'] + + # Special handling of backslashes needed in win32 platforms + if sys.platform == "win32": + self.clean_glob = self._clean_glob_win32 + else: + self.clean_glob = self._clean_glob + + #regexp to parse docstring for function signature + self.docstring_sig_re = re.compile(r'^[\w|\s.]+\(([^)]*)\).*') + self.docstring_kwd_re = re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)') + #use this if positional argument name is also needed + #= re.compile(r'[\s|\[]*(\w+)(?:\s*=?\s*.*)') + + self.magic_arg_matchers = [ + self.magic_config_matcher, + self.magic_color_matcher, + ] + + # This is set externally by InteractiveShell + self.custom_completers = None + + # This is a list of names of unicode characters that can be completed + # into their corresponding unicode value. The list is large, so we + # lazily initialize it on first use. Consuming code should access this + # attribute through the `@unicode_names` property. + self._unicode_names = None + + self._backslash_combining_matchers = [ + self.latex_name_matcher, + self.unicode_name_matcher, + back_latex_name_matcher, + back_unicode_name_matcher, + self.fwd_unicode_matcher, + ] + + if not self.backslash_combining_completions: + for matcher in self._backslash_combining_matchers: + self.disable_matchers.append(_get_matcher_id(matcher)) + + if not self.merge_completions: + self.suppress_competing_matchers = True + + @property + def matchers(self) -> List[Matcher]: + """All active matcher routines for completion""" + if self.dict_keys_only: + return [self.dict_key_matcher] + + if self.use_jedi: + return [ + *self.custom_matchers, + *self._backslash_combining_matchers, + *self.magic_arg_matchers, + self.custom_completer_matcher, + self.magic_matcher, + self._jedi_matcher, + self.dict_key_matcher, + self.file_matcher, + ] + else: + return [ + *self.custom_matchers, + *self._backslash_combining_matchers, + *self.magic_arg_matchers, + self.custom_completer_matcher, + self.dict_key_matcher, + # TODO: convert python_matches to v2 API + self.magic_matcher, + self.python_matches, + self.file_matcher, + self.python_func_kw_matcher, + ] + + def all_completions(self, text:str) -> List[str]: + """ + Wrapper around the completion methods for the benefit of emacs. 
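+
+        A rough usage sketch (``c`` is assumed to be an ``IPCompleter``
+        instance; actual results depend on the namespace)::
+
+            c.all_completions('pri')  # e.g. -> ['print']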
+        """
+        prefix = text.rpartition('.')[0]
+        with provisionalcompleter():
+            return ['.'.join([prefix, c.text]) if prefix and self.use_jedi else c.text
+                    for c in self.completions(text, len(text))]
+
+    def _clean_glob(self, text: str):
+        return self.glob("%s*" % text)
+
+    def _clean_glob_win32(self, text: str):
+        return [f.replace("\\", "/")
+                for f in self.glob("%s*" % text)]
+
+    @context_matcher()
+    def file_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
+        """Same as :any:`file_matches`, but adopted to new Matcher API."""
+        matches = self.file_matches(context.token)
+        # TODO: add a heuristic for suppressing (e.g. if it has OS-specific delimiter,
+        # starts with `/home/`, `C:\`, etc)
+        return _convert_matcher_v1_result_to_v2(matches, type="path")
+
+    def file_matches(self, text: str) -> List[str]:
+        """Match filenames, expanding ~USER type strings.
+
+        Most of the seemingly convoluted logic in this completer is an
+        attempt to handle filenames with spaces in them.  And yet it's not
+        quite perfect, because Python's readline doesn't expose all of the
+        GNU readline details needed for this to be done correctly.
+
+        For a filename with a space in it, the printed completions will be
+        only the parts after what's already been typed (instead of the
+        full completions, as is normally done).  I don't think with the
+        current (as of Python 2.3) Python readline it's possible to do
+        better.
+
+        .. deprecated:: 8.6
+            You can use :meth:`file_matcher` instead.
+        """
+
+        # chars that require escaping with backslash - i.e. chars
+        # that readline treats incorrectly as delimiters, but we
+        # don't want to treat as delimiters in filename matching
+        # when escaped with backslash
+        if text.startswith('!'):
+            text = text[1:]
+            text_prefix = u'!'
+        else:
+            text_prefix = u''
+
+        text_until_cursor = self.text_until_cursor
+        # track strings with open quotes
+        open_quotes = has_open_quotes(text_until_cursor)
+
+        if '(' in text_until_cursor or '[' in text_until_cursor:
+            lsplit = text
+        else:
+            try:
+                # arg_split ~ shlex.split, but with unicode bugs fixed by us
+                lsplit = arg_split(text_until_cursor)[-1]
+            except ValueError:
+                # typically an unmatched ", or backslash without escaped char.
+                if open_quotes:
+                    lsplit = text_until_cursor.split(open_quotes)[-1]
+                else:
+                    return []
+            except IndexError:
+                # tab pressed on empty line
+                lsplit = ""
+
+        if not open_quotes and lsplit != protect_filename(lsplit):
+            # if protectables are found, do matching on the whole escaped name
+            has_protectables = True
+            text0, text = text, lsplit
+        else:
+            has_protectables = False
+            text = os.path.expanduser(text)
+
+        if text == "":
+            return [text_prefix + protect_filename(f) for f in self.glob("*")]
+
+        # Compute the matches from the filesystem
+        if sys.platform == 'win32':
+            m0 = self.clean_glob(text)
+        else:
+            m0 = self.clean_glob(text.replace('\\', ''))
+
+        if has_protectables:
+            # If we had protectables, we need to revert our changes to the
+            # beginning of filename so that we don't double-write the part
+            # of the filename we have so far
+            len_lsplit = len(lsplit)
+            matches = [text_prefix + text0 +
+                       protect_filename(f[len_lsplit:]) for f in m0]
+        else:
+            if open_quotes:
+                # if we have a string with an open quote, we don't need to
+                # protect the names beyond the quote (and we _shouldn't_, as
+                # it would cause bugs when the filesystem call is made).
+ matches = m0 if sys.platform == "win32" else\ + [protect_filename(f, open_quotes) for f in m0] + else: + matches = [text_prefix + + protect_filename(f) for f in m0] + + # Mark directories in input list by appending '/' to their names. + return [x+'/' if os.path.isdir(x) else x for x in matches] + + @context_matcher() + def magic_matcher(self, context: CompletionContext) -> SimpleMatcherResult: + """Match magics.""" + text = context.token + matches = self.magic_matches(text) + result = _convert_matcher_v1_result_to_v2(matches, type="magic") + is_magic_prefix = len(text) > 0 and text[0] == "%" + result["suppress"] = is_magic_prefix and bool(result["completions"]) + return result + + def magic_matches(self, text: str): + """Match magics. + + .. deprecated:: 8.6 + You can use :meth:`magic_matcher` instead. + """ + # Get all shell magics now rather than statically, so magics loaded at + # runtime show up too. + lsm = self.shell.magics_manager.lsmagic() + line_magics = lsm['line'] + cell_magics = lsm['cell'] + pre = self.magic_escape + pre2 = pre+pre + + explicit_magic = text.startswith(pre) + + # Completion logic: + # - user gives %%: only do cell magics + # - user gives %: do both line and cell magics + # - no prefix: do both + # In other words, line magics are skipped if the user gives %% explicitly + # + # We also exclude magics that match any currently visible names: + # https://github.com/ipython/ipython/issues/4877, unless the user has + # typed a %: + # https://github.com/ipython/ipython/issues/10754 + bare_text = text.lstrip(pre) + global_matches = self.global_matches(bare_text) + if not explicit_magic: + def matches(magic): + """ + Filter magics, in particular remove magics that match + a name present in global namespace. + """ + return ( magic.startswith(bare_text) and + magic not in global_matches ) + else: + def matches(magic): + return magic.startswith(bare_text) + + comp = [ pre2+m for m in cell_magics if matches(m)] + if not text.startswith(pre2): + comp += [ pre+m for m in line_magics if matches(m)] + + return comp + + @context_matcher() + def magic_config_matcher(self, context: CompletionContext) -> SimpleMatcherResult: + """Match class names and attributes for %config magic.""" + # NOTE: uses `line_buffer` equivalent for compatibility + matches = self.magic_config_matches(context.line_with_cursor) + return _convert_matcher_v1_result_to_v2(matches, type="param") + + def magic_config_matches(self, text: str) -> List[str]: + """Match class names and attributes for %config magic. + + .. deprecated:: 8.6 + You can use :meth:`magic_config_matcher` instead. 
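+
+        A rough illustration (hypothetical shell state; the actual matches
+        depend on which configurables are loaded)::
+
+            completer.magic_config_matches('%config IPComp')
+            # e.g. -> ['IPCompleter']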
+ """ + texts = text.strip().split() + + if len(texts) > 0 and (texts[0] == 'config' or texts[0] == '%config'): + # get all configuration classes + classes = sorted(set([ c for c in self.shell.configurables + if c.__class__.class_traits(config=True) + ]), key=lambda x: x.__class__.__name__) + classnames = [ c.__class__.__name__ for c in classes ] + + # return all classnames if config or %config is given + if len(texts) == 1: + return classnames + + # match classname + classname_texts = texts[1].split('.') + classname = classname_texts[0] + classname_matches = [ c for c in classnames + if c.startswith(classname) ] + + # return matched classes or the matched class with attributes + if texts[1].find('.') < 0: + return classname_matches + elif len(classname_matches) == 1 and \ + classname_matches[0] == classname: + cls = classes[classnames.index(classname)].__class__ + help = cls.class_get_help() + # strip leading '--' from cl-args: + help = re.sub(re.compile(r'^--', re.MULTILINE), '', help) + return [ attr.split('=')[0] + for attr in help.strip().splitlines() + if attr.startswith(texts[1]) ] + return [] + + @context_matcher() + def magic_color_matcher(self, context: CompletionContext) -> SimpleMatcherResult: + """Match color schemes for %colors magic.""" + # NOTE: uses `line_buffer` equivalent for compatibility + matches = self.magic_color_matches(context.line_with_cursor) + return _convert_matcher_v1_result_to_v2(matches, type="param") + + def magic_color_matches(self, text: str) -> List[str]: + """Match color schemes for %colors magic. + + .. deprecated:: 8.6 + You can use :meth:`magic_color_matcher` instead. + """ + texts = text.split() + if text.endswith(' '): + # .split() strips off the trailing whitespace. Add '' back + # so that: '%colors ' -> ['%colors', ''] + texts.append('') + + if len(texts) == 2 and (texts[0] == 'colors' or texts[0] == '%colors'): + prefix = texts[1] + return [ color for color in InspectColors.keys() + if color.startswith(prefix) ] + return [] + + @context_matcher(identifier="IPCompleter.jedi_matcher") + def _jedi_matcher(self, context: CompletionContext) -> _JediMatcherResult: + matches = self._jedi_matches( + cursor_column=context.cursor_position, + cursor_line=context.cursor_line, + text=context.full_text, + ) + return { + "completions": matches, + # static analysis should not suppress other matchers + "suppress": False, + } + + def _jedi_matches( + self, cursor_column: int, cursor_line: int, text: str + ) -> Iterator[_JediCompletionLike]: + """ + Return a list of :any:`jedi.api.Completion`s object from a ``text`` and + cursor position. + + Parameters + ---------- + cursor_column : int + column position of the cursor in ``text``, 0-indexed. + cursor_line : int + line position of the cursor in ``text``, 0-indexed + text : str + text to complete + + Notes + ----- + If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion` + object containing a string with the Jedi debug information attached. + + .. deprecated:: 8.6 + You can use :meth:`_jedi_matcher` instead. 
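+
+        A rough invocation sketch (private API, illustrative only)::
+
+            matches = completer._jedi_matches(
+                cursor_column=3, cursor_line=0, text='imp'
+            )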
+        """
+        namespaces = [self.namespace]
+        if self.global_namespace is not None:
+            namespaces.append(self.global_namespace)
+
+        completion_filter = lambda x: x
+        offset = cursor_to_position(text, cursor_line, cursor_column)
+        # filter output if we are completing for object members
+        if offset:
+            pre = text[offset - 1]
+            if pre == '.':
+                if self.omit__names == 2:
+                    completion_filter = lambda c: not c.name.startswith('_')
+                elif self.omit__names == 1:
+                    completion_filter = lambda c: not (c.name.startswith('__') and c.name.endswith('__'))
+                elif self.omit__names == 0:
+                    completion_filter = lambda x: x
+                else:
+                    raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names))
+
+        interpreter = jedi.Interpreter(text[:offset], namespaces)
+        try_jedi = True
+
+        try:
+            # find the first token in the current tree -- if it is a ' or " then we are in a string
+            completing_string = False
+            try:
+                first_child = next(c for c in interpreter._get_module().tree_node.children if hasattr(c, 'value'))
+            except StopIteration:
+                pass
+            else:
+                # note the value may be ', ", or it may also be ''' or """, or
+                # in some cases, """what/you/typed..., but all of these are
+                # strings.
+                completing_string = len(first_child.value) > 0 and first_child.value[0] in {"'", '"'}
+
+            # if we are in a string jedi is likely not the right candidate for
+            # now. Skip it.
+            try_jedi = not completing_string
+        except Exception as e:
+            # many things can go wrong; we are using a private API, just don't crash.
+            if self.debug:
+                print("Error detecting if completing a non-finished string:", e, '|')
+
+        if not try_jedi:
+            return iter([])
+        try:
+            return filter(completion_filter, interpreter.complete(column=cursor_column, line=cursor_line + 1))
+        except Exception as e:
+            if self.debug:
+                return iter(
+                    [
+                        _FakeJediCompletion(
+                            'Oops Jedi has crashed, please report a bug with the following:\n"""\n%s\n"""'
+                            % (e)
+                        )
+                    ]
+                )
+            else:
+                return iter([])
+
+    @completion_matcher(api_version=1)
+    def python_matches(self, text: str) -> Iterable[str]:
+        """Match attributes or global python names"""
+        if "." in text:
+            try:
+                matches = self.attr_matches(text)
+                if text.endswith('.') and self.omit__names:
+                    if self.omit__names == 1:
+                        # true if txt is _not_ a __ name, false otherwise:
+                        no__name = (lambda txt:
+                                    re.match(r'.*\.__.*?__', txt) is None)
+                    else:
+                        # true if txt is _not_ a _ name, false otherwise:
+                        no__name = (lambda txt:
+                                    re.match(r'\._.*?', txt[txt.rindex('.'):]) is None)
+                    matches = filter(no__name, matches)
+            except NameError:
+                # catches <undefined attributes>.<tab>
+                matches = []
+        else:
+            matches = self.global_matches(text)
+        return matches
+
+    def _default_arguments_from_docstring(self, doc):
+        """Parse the first line of docstring for call signature.
+
+        Docstring should be of the form 'min(iterable[, key=func])\n'.
+        It can also parse cython docstring of the form
+        'Minuit.migrad(self, int ncall=10000, resume=True, int nsplit=1)'.
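+
+        A quick illustration (the parsing is heuristic, so treat this as a
+        sketch rather than a contract)::
+
+            >>> completer._default_arguments_from_docstring(
+            ...     'min(iterable[, key=func])')
+            ['key']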
+        """
+        if doc is None:
+            return []
+
+        #care only the firstline
+        line = doc.lstrip().splitlines()[0]
+
+        #p = re.compile(r'^[\w|\s.]+\(([^)]*)\).*')
+        #'min(iterable[, key=func])\n' -> 'iterable[, key=func]'
+        sig = self.docstring_sig_re.search(line)
+        if sig is None:
+            return []
+        # iterable[, key=func]' -> ['iterable[' ,' key=func]']
+        sig = sig.groups()[0].split(',')
+        ret = []
+        for s in sig:
+            #re.compile(r'[\s|\[]*(\w+)(?:\s*=\s*.*)')
+            ret += self.docstring_kwd_re.findall(s)
+        return ret
+
+    def _default_arguments(self, obj):
+        """Return the list of default arguments of obj if it is callable,
+        or empty list otherwise."""
+        call_obj = obj
+        ret = []
+        if inspect.isbuiltin(obj):
+            pass
+        elif not (inspect.isfunction(obj) or inspect.ismethod(obj)):
+            if inspect.isclass(obj):
+                #for cython embedsignature=True the constructor docstring
+                #belongs to the object itself not __init__
+                ret += self._default_arguments_from_docstring(
+                    getattr(obj, '__doc__', ''))
+                # for classes, check for __init__,__new__
+                call_obj = (getattr(obj, '__init__', None) or
+                            getattr(obj, '__new__', None))
+            # for all others, check if they are __call__able
+            elif hasattr(obj, '__call__'):
+                call_obj = obj.__call__
+        ret += self._default_arguments_from_docstring(
+            getattr(call_obj, '__doc__', ''))
+
+        _keeps = (inspect.Parameter.KEYWORD_ONLY,
+                  inspect.Parameter.POSITIONAL_OR_KEYWORD)
+
+        try:
+            sig = inspect.signature(obj)
+            ret.extend(k for k, v in sig.parameters.items() if
+                       v.kind in _keeps)
+        except ValueError:
+            pass
+
+        return list(set(ret))
+
+    @context_matcher()
+    def python_func_kw_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
+        """Match named parameters (kwargs) of the last open function."""
+        matches = self.python_func_kw_matches(context.token)
+        return _convert_matcher_v1_result_to_v2(matches, type="param")
+
+    def python_func_kw_matches(self, text):
+        """Match named parameters (kwargs) of the last open function.
+
+        .. deprecated:: 8.6
+            You can use :meth:`python_func_kw_matcher` instead.
+        """
+
+        if "." in text:  # a parameter cannot be dotted
+            return []
+        try:
+            regexp = self.__funcParamsRegex
+        except AttributeError:
+            regexp = self.__funcParamsRegex = re.compile(r'''
+                '.*?(?<!\\)' |    # single quoted strings or
+                ".*?(?<!\\)" |    # double quoted strings or
+                \w+          |    # identifier
+                \S                # other characters
+                ''', re.VERBOSE | re.DOTALL)
+        # 1. find the nearest identifier that comes before an unclosed
+        # parenthesis before the cursor
+        # e.g. for "foo (1+bar(x), pa<cursor>,a=1)", the candidate is "foo"
+        tokens = regexp.findall(self.text_until_cursor)
+        iterTokens = reversed(tokens)
+        openPar = 0
+
+        for token in iterTokens:
+            if token == ')':
+                openPar -= 1
+            elif token == '(':
+                openPar += 1
+                if openPar > 0:
+                    # found the last unclosed parenthesis
+                    break
+        else:
+            return []
+        # 2. 
Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
+        ids = []
+        isId = re.compile(r'\w+$').match
+
+        while True:
+            try:
+                ids.append(next(iterTokens))
+                if not isId(ids[-1]):
+                    ids.pop()
+                    break
+                if next(iterTokens) != '.':
+                    break
+            except StopIteration:
+                break
+
+        # Find all named arguments already assigned to, so as to avoid
+        # suggesting them again
+        usedNamedArgs = set()
+        par_level = -1
+        for token, next_token in zip(tokens, tokens[1:]):
+            if token == '(':
+                par_level += 1
+            elif token == ')':
+                par_level -= 1
+
+            if par_level != 0:
+                continue
+
+            if next_token != '=':
+                continue
+
+            usedNamedArgs.add(token)
+
+        argMatches = []
+        try:
+            callableObj = '.'.join(ids[::-1])
+            namedArgs = self._default_arguments(eval(callableObj,
+                                                     self.namespace))
+
+            # Remove used named arguments from the list, no need to show twice
+            for namedArg in set(namedArgs) - usedNamedArgs:
+                if namedArg.startswith(text):
+                    argMatches.append("%s=" % namedArg)
+        except Exception:
+            pass
+
+        return argMatches
+
+    @staticmethod
+    def _get_keys(obj: Any) -> List[Any]:
+        # Objects can define their own completions by defining an
+        # _ipython_key_completions_() method.
+        method = get_real_method(obj, '_ipython_key_completions_')
+        if method is not None:
+            return method()
+
+        # Special case some common in-memory dict-like types
+        if isinstance(obj, dict) or _safe_isinstance(obj, "pandas", "DataFrame"):
+            try:
+                return list(obj.keys())
+            except Exception:
+                return []
+        elif _safe_isinstance(obj, "pandas", "core", "indexing", "_LocIndexer"):
+            try:
+                return list(obj.obj.keys())
+            except Exception:
+                return []
+        elif _safe_isinstance(obj, 'numpy', 'ndarray') or\
+             _safe_isinstance(obj, 'numpy', 'void'):
+            return obj.dtype.names or []
+        return []
+
+    @context_matcher()
+    def dict_key_matcher(self, context: CompletionContext) -> SimpleMatcherResult:
+        """Match string keys in a dictionary, after e.g. ``foo[``."""
+        matches = self.dict_key_matches(context.token)
+        return _convert_matcher_v1_result_to_v2(
+            matches, type="dict key", suppress_if_matches=True
+        )
+
+    def dict_key_matches(self, text: str) -> List[str]:
+        """Match string keys in a dictionary, after e.g. ``foo[``.
+
+        .. deprecated:: 8.6
+            You can use :meth:`dict_key_matcher` instead.
+        """
+
+        # Short-circuit on closed dictionary (regular expression would
+        # not match anyway, but would take quite a while).
+        if self.text_until_cursor.strip().endswith("]"):
+            return []
+
+        match = DICT_MATCHER_REGEX.search(self.text_until_cursor)
+
+        if match is None:
+            return []
+
+        expr, prior_tuple_keys, key_prefix = match.groups()
+
+        obj = self._evaluate_expr(expr)
+
+        if obj is not_found:
+            return []
+
+        keys = self._get_keys(obj)
+        if not keys:
+            return keys
+
+        tuple_prefix = guarded_eval(
+            prior_tuple_keys,
+            EvaluationContext(
+                globals=self.global_namespace,
+                locals=self.namespace,
+                evaluation=self.evaluation,
+                in_subscript=True,
+            ),
+        )
+
+        closing_quote, token_offset, matches = match_dict_keys(
+            keys, key_prefix, self.splitter.delims, extra_prefix=tuple_prefix
+        )
+        if not matches:
+            return []
+
+        # get the cursor position of
+        # - the text being completed
+        # - the start of the key text
+        # - the start of the completion
+        text_start = len(self.text_until_cursor) - len(text)
+        if key_prefix:
+            key_start = match.start(3)
+            completion_start = key_start + token_offset
+        else:
+            key_start = completion_start = match.end()
+
+        # grab the leading prefix, to make sure all completions start with `text`
+        if text_start > key_start:
+            leading = ''
+        else:
+            leading = text[text_start:completion_start]
+
+        # append closing quote and bracket as appropriate
+        # this is *not* appropriate if the opening quote or bracket is outside
+        # the text given to this method, e.g. `d["""a\nt<tab>`
+        can_close_quote = False
+        can_close_bracket = False
+
+        continuation = self.line_buffer[len(self.text_until_cursor):].strip()
+
+        if continuation.startswith(closing_quote):
+            # do not close if already closed, e.g. `d['a<tab>'`
+            continuation = continuation[len(closing_quote):]
+        else:
+            can_close_quote = True
+
+        continuation = continuation.strip()
+
+        # e.g. `pandas.DataFrame` has different tuple indexer behaviour,
+        # handling it is out of scope, so let's avoid appending suffixes.
+        has_known_tuple_handling = isinstance(obj, dict)
+
+        can_close_bracket = (
+            not continuation.startswith("]") and self.auto_close_dict_keys
+        )
+        can_close_tuple_item = (
+            not continuation.startswith(",")
+            and has_known_tuple_handling
+            and self.auto_close_dict_keys
+        )
+        can_close_quote = can_close_quote and self.auto_close_dict_keys
+
+        # fast path if closing quote should be appended but no suffix is allowed
+        if not can_close_quote and not can_close_bracket and closing_quote:
+            return [leading + k for k in matches]
+
+        results = []
+
+        end_of_tuple_or_item = _DictKeyState.END_OF_TUPLE | _DictKeyState.END_OF_ITEM
+
+        for k, state_flag in matches.items():
+            result = leading + k
+            if can_close_quote and closing_quote:
+                result += closing_quote
+
+            if state_flag == end_of_tuple_or_item:
+                # We do not know which suffix to add,
+                # e.g. both tuple item and string
+                # match this item.
+                pass
+
+            if state_flag in end_of_tuple_or_item and can_close_bracket:
+                result += "]"
+            if state_flag == _DictKeyState.IN_TUPLE and can_close_tuple_item:
+                result += ", "
+            results.append(result)
+        return results
+
+    @context_matcher()
+    def unicode_name_matcher(self, context: CompletionContext):
+        """Same as :any:`unicode_name_matches`, but adopted to new Matcher API."""
+        fragment, matches = self.unicode_name_matches(context.text_until_cursor)
+        return _convert_matcher_v1_result_to_v2(
+            matches, type="unicode", fragment=fragment, suppress_if_matches=True
+        )
+
+    @staticmethod
+    def unicode_name_matches(text: str) -> Tuple[str, List[str]]:
+        """Match Latex-like syntax for unicode characters based
+        on the name of the character.
+ + This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` + + Works only on valid python 3 identifier, or on combining characters that + will combine to form a valid identifier. + """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos+1:] + try : + unic = unicodedata.lookup(s) + # allow combining chars + if ('a'+unic).isidentifier(): + return '\\'+s,[unic] + except KeyError: + pass + return '', [] + + @context_matcher() + def latex_name_matcher(self, context: CompletionContext): + """Match Latex syntax for unicode characters. + + This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` + """ + fragment, matches = self.latex_matches(context.text_until_cursor) + return _convert_matcher_v1_result_to_v2( + matches, type="latex", fragment=fragment, suppress_if_matches=True + ) + + def latex_matches(self, text: str) -> Tuple[str, Sequence[str]]: + """Match Latex syntax for unicode characters. + + This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` + + .. deprecated:: 8.6 + You can use :meth:`latex_name_matcher` instead. + """ + slashpos = text.rfind('\\') + if slashpos > -1: + s = text[slashpos:] + if s in latex_symbols: + # Try to complete a full latex symbol to unicode + # \\alpha -> α + return s, [latex_symbols[s]] + else: + # If a user has partially typed a latex symbol, give them + # a full list of options \al -> [\aleph, \alpha] + matches = [k for k in latex_symbols if k.startswith(s)] + if matches: + return s, matches + return '', () + + @context_matcher() + def custom_completer_matcher(self, context): + """Dispatch custom completer. + + If a match is found, suppresses all other matchers except for Jedi. + """ + matches = self.dispatch_custom_completer(context.token) or [] + result = _convert_matcher_v1_result_to_v2( + matches, type=_UNKNOWN_TYPE, suppress_if_matches=True + ) + result["ordered"] = True + result["do_not_suppress"] = {_get_matcher_id(self._jedi_matcher)} + return result + + def dispatch_custom_completer(self, text): + """ + .. deprecated:: 8.6 + You can use :meth:`custom_completer_matcher` instead. + """ + if not self.custom_completers: + return + + line = self.line_buffer + if not line.strip(): + return None + + # Create a little structure to pass all the relevant information about + # the current completion to any custom completer. + event = SimpleNamespace() + event.line = line + event.symbol = text + cmd = line.split(None,1)[0] + event.command = cmd + event.text_until_cursor = self.text_until_cursor + + # for foo etc, try also to find completer for %foo + if not cmd.startswith(self.magic_escape): + try_magic = self.custom_completers.s_matches( + self.magic_escape + cmd) + else: + try_magic = [] + + for c in itertools.chain(self.custom_completers.s_matches(cmd), + try_magic, + self.custom_completers.flat_matches(self.text_until_cursor)): + try: + res = c(event) + if res: + # first, try case sensitive match + withcase = [r for r in res if r.startswith(text)] + if withcase: + return withcase + # if none, then case insensitive ones are ok too + text_low = text.lower() + return [r for r in res if r.lower().startswith(text_low)] + except TryNext: + pass + except KeyboardInterrupt: + """ + If custom completer take too long, + let keyboard interrupt abort and return nothing. + """ + break + + return None + + def completions(self, text: str, offset: int)->Iterator[Completion]: + """ + Returns an iterator over the possible completions + + .. warning:: + + Unstable + + This function is unstable, API may change without warning. 
+        It will also raise unless used in a proper context manager.
+
+        Parameters
+        ----------
+        text : str
+            Full text of the current input, multi line string.
+        offset : int
+            Integer representing the position of the cursor in ``text``. Offset
+            is 0-based indexed.
+
+        Yields
+        ------
+        Completion
+
+        Notes
+        -----
+        The cursor on a text can either be seen as being "in between"
+        characters or "On" a character depending on the interface visible to
+        the user. For consistency the cursor being on "in between" characters X
+        and Y is equivalent to the cursor being "on" character Y, that is to say
+        the character the cursor is on is considered as being after the cursor.
+
+        Combining characters may span more than one position in the
+        text.
+
+        .. note::
+
+            If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--``
+            fake Completion token to distinguish completion returned by Jedi
+            and usual IPython completion.
+
+        .. note::
+
+            Completions are not completely deduplicated yet. If identical
+            completions are coming from different sources this function does not
+            ensure that each completion object will only be present once.
+        """
+        warnings.warn("_complete is a provisional API (as of IPython 6.0). "
+                      "It may change without warnings. "
+                      "Use in corresponding context manager.",
+                      category=ProvisionalCompleterWarning, stacklevel=2)
+
+        seen = set()
+        profiler: Optional[cProfile.Profile]
+        try:
+            if self.profile_completions:
+                import cProfile
+                profiler = cProfile.Profile()
+                profiler.enable()
+            else:
+                profiler = None
+
+            for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout / 1000):
+                if c and (c in seen):
+                    continue
+                yield c
+                seen.add(c)
+        except KeyboardInterrupt:
+            """if completions take too long and users send keyboard interrupt,
+            do not crash and return ASAP. """
+            pass
+        finally:
+            if profiler is not None:
+                profiler.disable()
+                ensure_dir_exists(self.profiler_output_dir)
+                output_path = os.path.join(self.profiler_output_dir, str(uuid.uuid4()))
+                print("Writing profiler output to", output_path)
+                profiler.dump_stats(output_path)
+
+    def _completions(self, full_text: str, offset: int, *, _timeout) -> Iterator[Completion]:
+        """
+        Core completion module. Same signature as :any:`completions`, with the
+        extra `timeout` parameter (in seconds).
+
+        Computing jedi's completion ``.type`` can be quite expensive (it is a
+        lazy property) and can require some warm-up, more warm-up than just
+        computing the ``name`` of a completion. The warm-up can be:
+
+        - Long warm-up the first time a module is encountered after
+          install/update: actually build parse/inference tree.
+
+        - first time the module is encountered in a session: load tree from
+          disk.
+
+        We don't want to block completions for tens of seconds so we give the
+        completer a "budget" of ``_timeout`` seconds per invocation to compute
+        completions types, the completions that have not yet been computed will
+        be marked as "unknown" and will have a chance to be computed next round
+        as things get cached.
+
+        Keep in mind that Jedi is not the only thing treating the completion, so
+        keep the timeout short-ish: if we take more than 0.3 seconds we still
+        have lots of processing to do.
+    def _completions(self, full_text: str, offset: int, *, _timeout) -> Iterator[Completion]:
+        """
+        Core completion module. Same signature as :any:`completions`, with the
+        extra `timeout` parameter (in seconds).
+
+        Computing jedi's completion ``.type`` can be quite expensive (it is a
+        lazy property) and can require some warm-up, more warm-up than just
+        computing the ``name`` of a completion. The warm-up can be:
+
+        - Long warm-up the first time a module is encountered after
+          install/update: actually building the parse/inference tree.
+
+        - First time the module is encountered in a session: loading the tree
+          from disk.
+
+        We don't want to block completions for tens of seconds, so we give the
+        completer a "budget" of ``_timeout`` seconds per invocation to compute
+        completion types; the completions whose type has not yet been computed
+        will be marked as "unknown" and will have a chance to be computed in
+        the next round, as things get cached.
+
+        Keep in mind that Jedi is not the only thing treating the completion,
+        so keep the timeout short-ish: if we take more than 0.3 seconds we
+        still have lots of processing to do.
+        """
+        deadline = time.monotonic() + _timeout
+
+        before = full_text[:offset]
+        cursor_line, cursor_column = position_to_cursor(full_text, offset)
+
+        jedi_matcher_id = _get_matcher_id(self._jedi_matcher)
+
+        def is_non_jedi_result(
+            result: MatcherResult, identifier: str
+        ) -> TypeGuard[SimpleMatcherResult]:
+            return identifier != jedi_matcher_id
+
+        results = self._complete(
+            full_text=full_text, cursor_line=cursor_line, cursor_pos=cursor_column
+        )
+
+        non_jedi_results: Dict[str, SimpleMatcherResult] = {
+            identifier: result
+            for identifier, result in results.items()
+            if is_non_jedi_result(result, identifier)
+        }
+
+        jedi_matches = (
+            cast(_JediMatcherResult, results[jedi_matcher_id])["completions"]
+            if jedi_matcher_id in results
+            else ()
+        )
+
+        iter_jm = iter(jedi_matches)
+        if _timeout:
+            for jm in iter_jm:
+                try:
+                    type_ = jm.type
+                except Exception:
+                    if self.debug:
+                        print("Error in Jedi getting type of ", jm)
+                    type_ = None
+                delta = len(jm.name_with_symbols) - len(jm.complete)
+                if type_ == 'function':
+                    signature = _make_signature(jm)
+                else:
+                    signature = ''
+                yield Completion(start=offset - delta,
+                                 end=offset,
+                                 text=jm.name_with_symbols,
+                                 type=type_,
+                                 signature=signature,
+                                 _origin='jedi')
+
+                if time.monotonic() > deadline:
+                    break
+
+        for jm in iter_jm:
+            delta = len(jm.name_with_symbols) - len(jm.complete)
+            yield Completion(
+                start=offset - delta,
+                end=offset,
+                text=jm.name_with_symbols,
+                type=_UNKNOWN_TYPE,  # don't compute type for speed
+                _origin="jedi",
+                signature="",
+            )
+
+        # TODO:
+        # Suppress this; right now it is just here for debugging.
+        if jedi_matches and non_jedi_results and self.debug:
+            some_start_offset = before.rfind(
+                next(iter(non_jedi_results.values()))["matched_fragment"]
+            )
+            yield Completion(
+                start=some_start_offset,
+                end=offset,
+                text="--jedi/ipython--",
+                _origin="debug",
+                type="none",
+                signature="",
+            )
+
+        ordered: List[Completion] = []
+        sortable: List[Completion] = []
+
+        for origin, result in non_jedi_results.items():
+            matched_text = result["matched_fragment"]
+            start_offset = before.rfind(matched_text)
+            is_ordered = result.get("ordered", False)
+            container = ordered if is_ordered else sortable
+
+            # I'm unsure if this is always true, so let's assert and see if it
+            # crashes
+            assert before.endswith(matched_text)
+
+            for simple_completion in result["completions"]:
+                completion = Completion(
+                    start=start_offset,
+                    end=offset,
+                    text=simple_completion.text,
+                    _origin=origin,
+                    signature="",
+                    type=simple_completion.type or _UNKNOWN_TYPE,
+                )
+                container.append(completion)
+
+        yield from list(self._deduplicate(ordered + self._sort(sortable)))[
+            :MATCHES_LIMIT
+        ]
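+    # Editor's note: illustrative only, not part of IPython. The
+    # `Completion.start`/`.end` values yielded above are absolute offsets
+    # into the full text, so applying a completion amounts to:
+    #
+    #     new_text = text[:comp.start] + comp.text + text[comp.end:]
+    #
+    # e.g. for text "d.from_" with the cursor at offset 7, a Jedi match
+    # "from_bytes" is yielded with start=2, end=7, giving "d.from_bytes".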
+    def complete(self, text=None, line_buffer=None, cursor_pos=None) -> Tuple[str, Sequence[str]]:
+        """Find completions for the given text and line context.
+
+        Note that both the text and the line_buffer are optional, but at least
+        one of them must be given.
+
+        Parameters
+        ----------
+        text : string, optional
+            Text to perform the completion on. If not given, the line buffer
+            is split using the instance's CompletionSplitter object.
+        line_buffer : string, optional
+            If not given, the completer attempts to obtain the current line
+            buffer via readline. This keyword allows clients that request
+            text completions in non-readline contexts to inform the completer
+            of the entire text.
+        cursor_pos : int, optional
+            Index of the cursor in the full line buffer. Should be provided by
+            remote frontends where the kernel has no access to frontend state.
+
+        Returns
+        -------
+        Tuple of two items:
+        text : str
+            Text that was actually used in the completion.
+        matches : list
+            A list of completion matches.
+
+        Notes
+        -----
+        This API is likely to be deprecated and replaced by
+        :any:`IPCompleter.completions` in the future.
+        """
+        warnings.warn('`Completer.complete` is pending deprecation since '
+                      'IPython 6.0 and will be replaced by `Completer.completions`.',
+                      PendingDeprecationWarning)
+        # Potential todo: fold the 3rd throwaway argument of _complete
+        # into the first two.
+        # TODO: Q: does the above refer to jedi completions (i.e. 0-indexed?)
+        # TODO: should we deprecate now, or does it stay?
+
+        results = self._complete(
+            line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0
+        )
+
+        jedi_matcher_id = _get_matcher_id(self._jedi_matcher)
+
+        return self._arrange_and_extract(
+            results,
+            # TODO: can we confirm that excluding Jedi here was a deliberate choice in the previous version?
+            skip_matchers={jedi_matcher_id},
+            # this API does not support different start/end positions (fragments of token).
+            abort_if_offset_changes=True,
+        )
+
+    def _arrange_and_extract(
+        self,
+        results: Dict[str, MatcherResult],
+        skip_matchers: Set[str],
+        abort_if_offset_changes: bool,
+    ):
+        sortable: List[AnyMatcherCompletion] = []
+        ordered: List[AnyMatcherCompletion] = []
+        most_recent_fragment = None
+        for identifier, result in results.items():
+            if identifier in skip_matchers:
+                continue
+            if not result["completions"]:
+                continue
+            if not most_recent_fragment:
+                most_recent_fragment = result["matched_fragment"]
+            if (
+                abort_if_offset_changes
+                and result["matched_fragment"] != most_recent_fragment
+            ):
+                break
+            if result.get("ordered", False):
+                ordered.extend(result["completions"])
+            else:
+                sortable.extend(result["completions"])
+
+        if not most_recent_fragment:
+            most_recent_fragment = ""  # to satisfy the typechecker (and just in case)
+
+        return most_recent_fragment, [
+            m.text for m in self._deduplicate(ordered + self._sort(sortable))
+        ]
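+    # Editor's note: an illustrative sketch, not part of IPython; the
+    # `color_matcher` name and its completions are invented. The
+    # MatcherResult dictionaries arranged above are what v2 matchers return;
+    # a minimal matcher in the new API could look like:
+    #
+    #     from IPython.core.completer import (
+    #         CompletionContext, SimpleCompletion, context_matcher)
+    #
+    #     @context_matcher()
+    #     def color_matcher(context: CompletionContext):
+    #         colors = ["red", "green", "blue"]
+    #         completions = [SimpleCompletion(text=c, type="param")
+    #                        for c in colors if c.startswith(context.token)]
+    #         return {"completions": completions}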
+ """ + + # if the cursor position isn't given, the only sane assumption we can + # make is that it's at the end of the line (the common case) + if cursor_pos is None: + cursor_pos = len(line_buffer) if text is None else len(text) + + if self.use_main_ns: + self.namespace = __main__.__dict__ + + # if text is either None or an empty string, rely on the line buffer + if (not line_buffer) and full_text: + line_buffer = full_text.split('\n')[cursor_line] + if not text: # issue #11508: check line_buffer before calling split_line + text = ( + self.splitter.split_line(line_buffer, cursor_pos) if line_buffer else "" + ) + + # If no line buffer is given, assume the input text is all there was + if line_buffer is None: + line_buffer = text + + # deprecated - do not use `line_buffer` in new code. + self.line_buffer = line_buffer + self.text_until_cursor = self.line_buffer[:cursor_pos] + + if not full_text: + full_text = line_buffer + + context = CompletionContext( + full_text=full_text, + cursor_position=cursor_pos, + cursor_line=cursor_line, + token=text, + limit=MATCHES_LIMIT, + ) + + # Start with a clean slate of completions + results: Dict[str, MatcherResult] = {} + + jedi_matcher_id = _get_matcher_id(self._jedi_matcher) + + suppressed_matchers: Set[str] = set() + + matchers = { + _get_matcher_id(matcher): matcher + for matcher in sorted( + self.matchers, key=_get_matcher_priority, reverse=True + ) + } + + for matcher_id, matcher in matchers.items(): + matcher_id = _get_matcher_id(matcher) + + if matcher_id in self.disable_matchers: + continue + + if matcher_id in results: + warnings.warn(f"Duplicate matcher ID: {matcher_id}.") + + if matcher_id in suppressed_matchers: + continue + + result: MatcherResult + try: + if _is_matcher_v1(matcher): + result = _convert_matcher_v1_result_to_v2( + matcher(text), type=_UNKNOWN_TYPE + ) + elif _is_matcher_v2(matcher): + result = matcher(context) + else: + api_version = _get_matcher_api_version(matcher) + raise ValueError(f"Unsupported API version {api_version}") + except: + # Show the ugly traceback if the matcher causes an + # exception, but do NOT crash the kernel! + sys.excepthook(*sys.exc_info()) + continue + + # set default value for matched fragment if suffix was not selected. + result["matched_fragment"] = result.get("matched_fragment", context.token) + + if not suppressed_matchers: + suppression_recommended: Union[bool, Set[str]] = result.get( + "suppress", False + ) + + suppression_config = ( + self.suppress_competing_matchers.get(matcher_id, None) + if isinstance(self.suppress_competing_matchers, dict) + else self.suppress_competing_matchers + ) + should_suppress = ( + (suppression_config is True) + or (suppression_recommended and (suppression_config is not False)) + ) and has_any_completions(result) + + if should_suppress: + suppression_exceptions: Set[str] = result.get( + "do_not_suppress", set() + ) + if isinstance(suppression_recommended, Iterable): + to_suppress = set(suppression_recommended) + else: + to_suppress = set(matchers) + suppressed_matchers = to_suppress - suppression_exceptions + + new_results = {} + for previous_matcher_id, previous_result in results.items(): + if previous_matcher_id not in suppressed_matchers: + new_results[previous_matcher_id] = previous_result + results = new_results + + results[matcher_id] = result + + _, matches = self._arrange_and_extract( + results, + # TODO Jedi completions non included in legacy stateful API; was this deliberate or omission? 
+    @staticmethod
+    def _deduplicate(
+        matches: Sequence[AnyCompletion],
+    ) -> Iterable[AnyCompletion]:
+        filtered_matches: Dict[str, AnyCompletion] = {}
+        for match in matches:
+            text = match.text
+            if (
+                text not in filtered_matches
+                or filtered_matches[text].type == _UNKNOWN_TYPE
+            ):
+                filtered_matches[text] = match
+
+        return filtered_matches.values()
+
+    @staticmethod
+    def _sort(matches: Sequence[AnyCompletion]):
+        return sorted(matches, key=lambda x: completions_sorting_key(x.text))
+
+    @context_matcher()
+    def fwd_unicode_matcher(self, context: CompletionContext):
+        """Same as :any:`fwd_unicode_match`, but adapted to the new Matcher API."""
+        # TODO: use `context.limit` to terminate early once we matched the maximum
+        # number that will be used downstream; can be added as an optional to
+        # `fwd_unicode_match(text: str, limit: int = None)` or we could re-implement here.
+        fragment, matches = self.fwd_unicode_match(context.text_until_cursor)
+        return _convert_matcher_v1_result_to_v2(
+            matches, type="unicode", fragment=fragment, suppress_if_matches=True
+        )
+
+    def fwd_unicode_match(self, text: str) -> Tuple[str, Sequence[str]]:
+        """
+        Forward match a string starting with a backslash with a list of
+        potential Unicode completions.
+
+        Will compute the list of Unicode character names on first call and
+        cache it.
+
+        .. deprecated:: 8.6
+            You can use :meth:`fwd_unicode_matcher` instead.
+
+        Returns
+        -------
+        A tuple with:
+        - matched text (empty if no matches)
+        - list of potential completions (an empty tuple if there are no
+          matches)
+        """
+        # TODO: self.unicode_names is here a list we traverse each time with ~100k elements.
+        # We could do a faster match using a Trie.
+
+        # Using pygtrie the following seems to work:
+
+        #     s = PrefixSet()
+
+        #     for c in range(0,0x10FFFF + 1):
+        #         try:
+        #             s.add(unicodedata.name(chr(c)))
+        #         except ValueError:
+        #             pass
+        #     [''.join(k) for k in s.iter(prefix)]
+
+        # But this needs to be timed and adds an extra dependency.
+
+        slashpos = text.rfind('\\')
+        # if text contains a backslash
+        if slashpos > -1:
+            # PERF: It's important that we don't access self._unicode_names
+            # until we're inside this if-block. _unicode_names is lazily
+            # initialized, and it takes a user-noticeable amount of time to
+            # initialize it, so we don't want to initialize it unless we're
+            # actually going to use it.
+            s = text[slashpos + 1 :]
+            sup = s.upper()
+            candidates = [x for x in self.unicode_names if x.startswith(sup)]
+            if candidates:
+                return s, candidates
+            candidates = [x for x in self.unicode_names if sup in x]
+            if candidates:
+                return s, candidates
+            splitsup = sup.split(" ")
+            candidates = [
+                x for x in self.unicode_names if all(u in x for u in splitsup)
+            ]
+            if candidates:
+                return s, candidates
+
+            return "", ()
+
+        # if text does not contain a backslash
+        else:
+            return '', ()
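+    # Editor's note: illustrative examples, not part of IPython. The three
+    # passes in fwd_unicode_match above widen the match progressively:
+    #
+    #     completer.fwd_unicode_match("\\GREEK SM")    # prefix match
+    #     completer.fwd_unicode_match("\\SMALL LET")   # substring match
+    #     completer.fwd_unicode_match("\\GREEK ETA")   # match on all words
+    #
+    # each returning a ("GREEK SM", [...candidate names...]) style tuple.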
+ """ + if self._unicode_names is None: + names = [] + for c in range(0,0x10FFFF + 1): + try: + names.append(unicodedata.name(chr(c))) + except ValueError: + pass + self._unicode_names = _unicode_name_compute(_UNICODE_RANGES) + + return self._unicode_names + +def _unicode_name_compute(ranges:List[Tuple[int,int]]) -> List[str]: + names = [] + for start,stop in ranges: + for c in range(start, stop) : + try: + names.append(unicodedata.name(chr(c))) + except ValueError: + pass + return names diff --git a/myenv/lib/python3.10/site-packages/IPython/core/completerlib.py b/myenv/lib/python3.10/site-packages/IPython/core/completerlib.py new file mode 100644 index 000000000..0ca97e7b7 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/completerlib.py @@ -0,0 +1,370 @@ +# encoding: utf-8 +"""Implementations for various useful completers. + +These are all loaded by default by IPython. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011 The IPython Development Team. +# +# Distributed under the terms of the BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import glob +import inspect +import os +import re +import sys +from importlib import import_module +from importlib.machinery import all_suffixes + + +# Third-party imports +from time import time +from zipimport import zipimporter + +# Our own imports +from .completer import expand_user, compress_user +from .error import TryNext +from ..utils._process_common import arg_split + +# FIXME: this should be pulled in with the right call via the component system +from IPython import get_ipython + +from typing import List + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- +_suffixes = all_suffixes() + +# Time in seconds after which the rootmodules will be stored permanently in the +# ipython ip.db database (kept in the user's .ipython dir). +TIMEOUT_STORAGE = 2 + +# Time in seconds after which we give up +TIMEOUT_GIVEUP = 20 + +# Regular expression for the python import statement +import_re = re.compile(r'(?P[^\W\d]\w*?)' + r'(?P[/\\]__init__)?' + r'(?P%s)$' % + r'|'.join(re.escape(s) for s in _suffixes)) + +# RE for the ipython %run command (python + ipython scripts) +magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$') + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +def module_list(path): + """ + Return the list containing the names of the modules available in the given + folder. + """ + # sys.path has the cwd as an empty string, but isdir/listdir need it as '.' + if path == '': + path = '.' + + # A few local constants to be used in loops below + pjoin = os.path.join + + if os.path.isdir(path): + # Build a list of all files in the directory and all files + # in its subdirectories. For performance reasons, do not + # recurse more than one level into subdirectories. 
+        files = []
+        for root, dirs, nondirs in os.walk(path, followlinks=True):
+            subdir = root[len(path)+1:]
+            if subdir:
+                files.extend(pjoin(subdir, f) for f in nondirs)
+                dirs[:] = []  # Do not recurse into additional subdirectories.
+            else:
+                files.extend(nondirs)
+
+    else:
+        try:
+            files = list(zipimporter(path)._files.keys())
+        except:
+            files = []
+
+    # Build a list of modules which match the import_re regex.
+    modules = []
+    for f in files:
+        m = import_re.match(f)
+        if m:
+            modules.append(m.group('name'))
+    return list(set(modules))
+
+
+def get_root_modules():
+    """
+    Returns a list containing the names of all the modules available in the
+    folders of the pythonpath.
+
+    ip.db['rootmodules_cache'] maps sys.path entries to lists of modules.
+    """
+    ip = get_ipython()
+    if ip is None:
+        # No global shell instance to store cached list of modules.
+        # Don't try to scan for modules every time.
+        return list(sys.builtin_module_names)
+
+    rootmodules_cache = ip.db.get('rootmodules_cache', {})
+    rootmodules = list(sys.builtin_module_names)
+    start_time = time()
+    store = False
+    for path in sys.path:
+        try:
+            modules = rootmodules_cache[path]
+        except KeyError:
+            modules = module_list(path)
+            try:
+                modules.remove('__init__')
+            except ValueError:
+                pass
+            if path not in ('', '.'):  # cwd modules should not be cached
+                rootmodules_cache[path] = modules
+            if time() - start_time > TIMEOUT_STORAGE and not store:
+                store = True
+                print("\nCaching the list of root modules, please wait!")
+                print("(This will only be done once - type '%rehashx' to "
+                      "reset cache!)\n")
+                sys.stdout.flush()
+            if time() - start_time > TIMEOUT_GIVEUP:
+                print("This is taking too long, we give up.\n")
+                return []
+        rootmodules.extend(modules)
+    if store:
+        ip.db['rootmodules_cache'] = rootmodules_cache
+    rootmodules = list(set(rootmodules))
+    return rootmodules
+
+
+def is_importable(module, attr, only_modules):
+    if only_modules:
+        return inspect.ismodule(getattr(module, attr))
+    else:
+        return not(attr[:2] == '__' and attr[-2:] == '__')
+
+def is_possible_submodule(module, attr):
+    try:
+        obj = getattr(module, attr)
+    except AttributeError:
+        # Is possibly an unimported submodule
+        return True
+    except TypeError:
+        # https://github.com/ipython/ipython/issues/9678
+        return False
+    return inspect.ismodule(obj)
+
+
+def try_import(mod: str, only_modules=False) -> List[str]:
+    """
+    Try to import the given module and return a list of potential completions.
+    """
+    mod = mod.rstrip('.')
+    try:
+        m = import_module(mod)
+    except:
+        return []
+
+    m_is_init = '__init__' in (getattr(m, '__file__', '') or '')
+
+    completions = []
+    if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
+        completions.extend([attr for attr in dir(m) if
+                            is_importable(m, attr, only_modules)])
+
+    m_all = getattr(m, "__all__", [])
+    if only_modules:
+        completions.extend(attr for attr in m_all if is_possible_submodule(m, attr))
+    else:
+        completions.extend(m_all)
+
+    if m_is_init:
+        completions.extend(module_list(os.path.dirname(m.__file__)))
+    completions_set = {c for c in completions if isinstance(c, str)}
+    completions_set.discard('__init__')
+    return list(completions_set)
+
+
+#-----------------------------------------------------------------------------
+# Completion-related functions.
+#-----------------------------------------------------------------------------
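+# Editor's note: illustrative examples, not part of IPython. The helpers
+# above feed `module_completion` below; standalone they behave roughly like:
+#
+#     try_import("xml.dom")                  # attributes/submodules of xml.dom
+#     try_import("xml", only_modules=True)   # importable submodules only
+#     module_list(os.path.dirname(os.__file__))  # module names in a folder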
+
+def quick_completer(cmd, completions):
+    r""" Easily create a trivial completer for a command.
+
+    Takes either a list of completions, or all completions in a string (that
+    will be split on whitespace).
+
+    Example::
+
+        [d:\ipython]|1> import ipy_completers
+        [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
+        [d:\ipython]|3> foo b<TAB>
+        bar baz
+        [d:\ipython]|3> foo ba
+    """
+
+    if isinstance(completions, str):
+        completions = completions.split()
+
+    def do_complete(self, event):
+        return completions
+
+    get_ipython().set_hook('complete_command', do_complete, str_key=cmd)
+
+def module_completion(line):
+    """
+    Returns a list containing the completion possibilities for an import line.
+
+    The line looks like this:
+    'import xml.d'
+    'from xml.dom import'
+    """
+
+    words = line.split(' ')
+    nwords = len(words)
+
+    # from whatever <tab> -> 'import '
+    if nwords == 3 and words[0] == 'from':
+        return ['import ']
+
+    # 'from xy<tab>' or 'import xy<tab>'
+    if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}):
+        if nwords == 1:
+            return get_root_modules()
+        mod = words[1].split('.')
+        if len(mod) < 2:
+            return get_root_modules()
+        completion_list = try_import('.'.join(mod[:-1]), True)
+        return ['.'.join(mod[:-1] + [el]) for el in completion_list]
+
+    # 'from xyz import abc<tab>'
+    if nwords >= 3 and words[0] == 'from':
+        mod = words[1]
+        return try_import(mod)
+
+#-----------------------------------------------------------------------------
+# Completers
+#-----------------------------------------------------------------------------
+# These all have the func(self, event) signature to be used as custom
+# completers
+
+def module_completer(self, event):
+    """Give completions after user has typed 'import ...' or 'from ...'"""
+
+    # This works in all versions of python. While 2.5 has
+    # pkgutil.walk_packages(), that particular routine is fairly dangerous,
+    # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
+    # of possibly problematic side effects.
+    # This searches the folders in sys.path for available modules.
+
+    return module_completion(event.line)
+
+# FIXME: there's a lot of logic common to the run, cd and builtin file
+# completers, that is currently reimplemented in each.
+
+def magic_run_completer(self, event):
+    """Complete files that end in .py or .ipy or .ipynb for the %run command.
+    """
+    comps = arg_split(event.line, strict=False)
+    # relpath should be the current token that we need to complete.
+    if (len(comps) > 1) and (not event.line.endswith(' ')):
+        relpath = comps[-1].strip("'\"")
+    else:
+        relpath = ''
+
+    #print("\nev=", event)  # dbg
+    #print("rp=", relpath)  # dbg
+    #print('comps=', comps)  # dbg
+
+    lglob = glob.glob
+    isdir = os.path.isdir
+    relpath, tilde_expand, tilde_val = expand_user(relpath)
+
+    # Find if the user has already typed the first filename, after which we
+    # should complete on all files, since after the first one other files may
+    # be arguments to the input script.
+ + if any(magic_run_re.match(c) for c in comps): + matches = [f.replace('\\','/') + ('/' if isdir(f) else '') + for f in lglob(relpath+'*')] + else: + dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] + pys = [f.replace('\\','/') + for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + + lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')] + + matches = dirs + pys + + #print('run comp:', dirs+pys) # dbg + return [compress_user(p, tilde_expand, tilde_val) for p in matches] + + +def cd_completer(self, event): + """Completer function for cd, which only returns directories.""" + ip = get_ipython() + relpath = event.symbol + + #print(event) # dbg + if event.line.endswith('-b') or ' -b ' in event.line: + # return only bookmark completions + bkms = self.db.get('bookmarks', None) + if bkms: + return bkms.keys() + else: + return [] + + if event.symbol == '-': + width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) + # jump in directory history by number + fmt = '-%0' + width_dh +'d [%s]' + ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] + if len(ents) > 1: + return ents + return [] + + if event.symbol.startswith('--'): + return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] + + # Expand ~ in path and normalize directory separators. + relpath, tilde_expand, tilde_val = expand_user(relpath) + relpath = relpath.replace('\\','/') + + found = [] + for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') + if os.path.isdir(f)]: + if ' ' in d: + # we don't want to deal with any of that, complex code + # for this is elsewhere + raise TryNext + + found.append(d) + + if not found: + if os.path.isdir(relpath): + return [compress_user(relpath, tilde_expand, tilde_val)] + + # if no completions so far, try bookmarks + bks = self.db.get('bookmarks',{}) + bkmatches = [s for s in bks if s.startswith(event.symbol)] + if bkmatches: + return bkmatches + + raise TryNext + + return [compress_user(p, tilde_expand, tilde_val) for p in found] + +def reset_completer(self, event): + "A completer for %reset magic" + return '-f -s in out array dhist'.split() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/crashhandler.py b/myenv/lib/python3.10/site-packages/IPython/core/crashhandler.py new file mode 100644 index 000000000..f60a75bbc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/crashhandler.py @@ -0,0 +1,236 @@ +# encoding: utf-8 +"""sys.excepthook for IPython itself, leaves a detailed report on disk. + +Authors: + +* Fernando Perez +* Brian E. Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2001-2007 Fernando Perez. +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import traceback +from pprint import pformat +from pathlib import Path + +from IPython.core import ultratb +from IPython.core.release import author_email +from IPython.utils.sysinfo import sys_info +from IPython.utils.py3compat import input + +from IPython.core.release import __version__ as version + +from typing import Optional + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +# Template for the user message. +_default_message_template = """\ +Oops, {app_name} crashed. We do our best to make it stable, but... + +A crash report was automatically generated with the following information: + - A verbatim copy of the crash traceback. + - A copy of your input history during this session. + - Data on your current {app_name} configuration. + +It was left in the file named: +\t'{crash_report_fname}' +If you can email this file to the developers, the information in it will help +them in understanding and correcting the problem. + +You can mail it to: {contact_name} at {contact_email} +with the subject '{app_name} Crash Report'. + +If you want to do it now, the following command will work (under Unix): +mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname} + +In your email, please also include information about: +- The operating system under which the crash happened: Linux, macOS, Windows, + other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2, + Windows 10 Pro), and whether it is 32-bit or 64-bit; +- How {app_name} was installed: using pip or conda, from GitHub, as part of + a Docker container, or other, providing more detail if possible; +- How to reproduce the crash: what exact sequence of instructions can one + input to get the same crash? Ideally, find a minimal yet complete sequence + of instructions that yields the crash. + +To ensure accurate tracking of this issue, please file a report about it at: +{bug_tracker} +""" + +_lite_message_template = """ +If you suspect this is an IPython {version} bug, please report it at: + https://github.com/ipython/ipython/issues +or send an email to the mailing list at {email} + +You can print a more detailed traceback right now with "%tb", or use "%debug" +to interactively debug it. + +Extra-detailed tracebacks for bug-reporting purposes can be enabled via: + {config}Application.verbose_crash=True +""" + + +class CrashHandler(object): + """Customizable crash handlers for IPython applications. + + Instances of this class provide a :meth:`__call__` method which can be + used as a ``sys.excepthook``. The :meth:`__call__` signature is:: + + def __call__(self, etype, evalue, etb) + """ + + message_template = _default_message_template + section_sep = '\n\n'+'*'*75+'\n\n' + + def __init__( + self, + app, + contact_name: Optional[str] = None, + contact_email: Optional[str] = None, + bug_tracker: Optional[str] = None, + show_crash_traceback: bool = True, + call_pdb: bool = False, + ): + """Create a new crash handler + + Parameters + ---------- + app : Application + A running :class:`Application` instance, which will be queried at + crash time for internal information. + contact_name : str + A string with the name of the person to contact. 
+        contact_email : str
+            A string with the email address of the contact.
+        bug_tracker : str
+            A string with the URL for your project's bug tracker.
+        show_crash_traceback : bool
+            If False, don't print the crash traceback on stderr; only generate
+            the on-disk report.
+        call_pdb : bool
+            Whether to call pdb on crash.
+
+        Attributes
+        ----------
+        These instances contain some non-argument attributes which allow for
+        further customization of the crash handler's behavior. Please see the
+        source for further details.
+        """
+        self.crash_report_fname = "Crash_report_%s.txt" % app.name
+        self.app = app
+        self.call_pdb = call_pdb
+        #self.call_pdb = True # dbg
+        self.show_crash_traceback = show_crash_traceback
+        self.info = dict(app_name = app.name,
+                    contact_name = contact_name,
+                    contact_email = contact_email,
+                    bug_tracker = bug_tracker,
+                    crash_report_fname = self.crash_report_fname)
+
+
+    def __call__(self, etype, evalue, etb):
+        """Handle an exception; callable compatible with sys.excepthook."""
+
+        # do not allow the crash handler to be called twice without reinstalling it
+        # this prevents unlikely errors in the crash handling from entering an
+        # infinite loop.
+        sys.excepthook = sys.__excepthook__
+
+        # Report tracebacks shouldn't use color in general (safer for users)
+        color_scheme = 'NoColor'
+
+        # Use this ONLY for developer debugging (keep commented out for release)
+        #color_scheme = 'Linux'   # dbg
+        try:
+            rptdir = self.app.ipython_dir
+        except:
+            rptdir = Path.cwd()
+        if rptdir is None or not Path.is_dir(rptdir):
+            rptdir = Path.cwd()
+        report_name = rptdir / self.crash_report_fname
+        # write the report filename into the instance dict so it can get
+        # properly expanded out in the user message template
+        self.crash_report_fname = report_name
+        self.info['crash_report_fname'] = report_name
+        TBhandler = ultratb.VerboseTB(
+            color_scheme=color_scheme,
+            long_header=1,
+            call_pdb=self.call_pdb,
+        )
+        if self.call_pdb:
+            TBhandler(etype, evalue, etb)
+            return
+        else:
+            traceback = TBhandler.text(etype, evalue, etb, context=31)
+
+        # print traceback to screen
+        if self.show_crash_traceback:
+            print(traceback, file=sys.stderr)
+
+        # and generate a complete report on disk
+        try:
+            report = open(report_name, "w", encoding="utf-8")
+        except:
+            print('Could not create crash report on disk.', file=sys.stderr)
+            return
+
+        with report:
+            # Inform user on stderr of what happened
+            print('\n'+'*'*70+'\n', file=sys.stderr)
+            print(self.message_template.format(**self.info), file=sys.stderr)
+
+            # Construct report on disk
+            report.write(self.make_report(traceback))
+
+        input("Hit <Enter> to quit (your terminal may close):")
+
+    def make_report(self, traceback):
+        """Return a string containing a crash report."""
+
+        sec_sep = self.section_sep
+
+        report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n']
+        rpt_add = report.append
+        rpt_add(sys_info())
+
+        try:
+            config = pformat(self.app.config)
+            rpt_add(sec_sep)
+            rpt_add('Application name: %s\n\n' % self.app.name)
+            rpt_add('Current user configuration structure:\n\n')
+            rpt_add(config)
+        except:
+            pass
+        rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
+
+        return ''.join(report)
+
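+# Editor's note: an illustrative sketch, not part of IPython; `app` here
+# stands for a hypothetical Application instance. An application typically
+# wires the handler up by installing an instance as the global excepthook:
+#
+#     handler = CrashHandler(app,
+#                            contact_name="Jane Dev",
+#                            contact_email="jane@example.com",
+#                            bug_tracker="https://example.com/issues")
+#     sys.excepthook = handler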
+
+def crash_handler_lite(etype, evalue, tb):
+    """a light excepthook, adding a small message to the usual traceback"""
+    traceback.print_exception(etype, evalue, tb)
+
+    from IPython.core.interactiveshell import InteractiveShell
+    if InteractiveShell.initialized():
+        # we are in a Shell environment, give %magic example
+        config = "%config "
+    else:
+        # we are not in a shell, show the generic config prefix
+        config = "c."
+    print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr)
+
diff --git a/myenv/lib/python3.10/site-packages/IPython/core/debugger.py b/myenv/lib/python3.10/site-packages/IPython/core/debugger.py
new file mode 100644
index 000000000..73b032874
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/core/debugger.py
@@ -0,0 +1,998 @@
+# -*- coding: utf-8 -*-
+"""
+Pdb debugger class.
+
+
+This is an extension to PDB which adds a number of new features.
+Note that there is also the `IPython.terminal.debugger` class which provides UI
+improvements.
+
+We also strongly recommend using this via the `ipdb` package, which provides
+extra configuration options.
+
+Among other things, this subclass of PDB:
+ - supports many IPython magics like pdef/psource
+ - hides frames in tracebacks based on `__tracebackhide__`
+ - allows skipping frames based on `__debuggerskip__`
+
+Frame skipping and hiding are configurable via the `skip_predicates`
+command.
+
+By default, frames containing ``__tracebackhide__=True`` will be hidden;
+hiding frames from read-only files is controlled by the ``readonly``
+predicate.
+
+Frames containing ``__debuggerskip__`` will be stepped over, and frames whose
+parent frame has ``__debuggerskip__`` set to ``True`` will be skipped.
+
+    >>> def helpers_helper():
+    ...     pass
+    ...
+    ... def helper_1():
+    ...     print("don't step in me")
+    ...     helpers_helper() # will be stepped over unless breakpoint set.
+    ...
+    ...
+    ... def helper_2():
+    ...     print("in me neither")
+    ...
+
+One can define a decorator that wraps a function between the two helpers:
+
+    >>> def pdb_skipped_decorator(function):
+    ...
+    ...
+    ...     def wrapped_fn(*args, **kwargs):
+    ...         __debuggerskip__ = True
+    ...         helper_1()
+    ...         __debuggerskip__ = False
+    ...         result = function(*args, **kwargs)
+    ...         __debuggerskip__ = True
+    ...         helper_2()
+    ...         # setting __debuggerskip__ to False again is not necessary
+    ...         return result
+    ...
+    ...     return wrapped_fn
+
+When decorating a function, ipdb will directly step into ``bar()`` by
+default:
+
+    >>> @pdb_skipped_decorator
+    ... def bar(x, y):
+    ...     return x * y
+
+
+You can toggle the behavior with
+
+    ipdb> skip_predicates debuggerskip false
+
+or configure it in your ``.pdbrc``
+
+
+
+License
+-------
+
+Modified from the standard pdb.Pdb class to avoid including readline, so that
+the command line completion of other programs which include this isn't
+damaged.
+
+In the future, this class will be expanded with improvements over the standard
+pdb.
+
+The original code in this file is mainly lifted out of cmd.py in Python 2.2,
+with minor changes. Licensing should therefore be under the standard Python
+terms. For details on the PSF (Python Software Foundation) standard license,
+see:
+
+https://docs.python.org/2/license.html
+
+
+All the changes since then are under the same license as IPython.
+
+"""
+
+#*****************************************************************************
+#
+# This file is licensed under the PSF license.
+#
+# Copyright (C) 2001 Python Software Foundation, www.python.org
+# Copyright (C) 2005-2006 Fernando Perez. <fperez@colorado.edu>
+#
+#
+#*****************************************************************************
+
+import inspect
+import linecache
+import sys
+import re
+import os
+
+from IPython import get_ipython
+from IPython.utils import PyColorize
+from IPython.utils import coloransi, py3compat
+from IPython.core.excolors import exception_colors
+
+# skip module doctests
+__skip_doctest__ = True
+
+prompt = 'ipdb> '
+
+# We have to check this directly from sys.argv, config struct not yet available
+from pdb import Pdb as OldPdb
+
+# Allow the set_trace code to operate outside of an ipython instance, even if
+# it does so with some limitations. The rest of this support is implemented in
+# the Tracer constructor.
+
+DEBUGGERSKIP = "__debuggerskip__"
+
+
+def make_arrow(pad):
+    """generate the leading arrow in front of traceback or debugger"""
+    if pad >= 2:
+        return '-'*(pad-2) + '> '
+    elif pad == 1:
+        return '>'
+    return ''
+
+
+def BdbQuit_excepthook(et, ev, tb, excepthook=None):
+    """Exception hook which handles `BdbQuit` exceptions.
+
+    All other exceptions are processed using the `excepthook`
+    parameter.
+    """
+    raise ValueError(
+        "`BdbQuit_excepthook` is deprecated since version 5.1",
+    )
+
+
+def BdbQuit_IPython_excepthook(self, et, ev, tb, tb_offset=None):
+    raise ValueError(
+        "`BdbQuit_IPython_excepthook` is deprecated since version 5.1",
+    )
+
+
+RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+')
+
+
+def strip_indentation(multiline_string):
+    return RGX_EXTRA_INDENT.sub('', multiline_string)
+
+
+def decorate_fn_with_doc(new_fn, old_fn, additional_text=""):
+    """Make new_fn have old_fn's doc string. This is particularly useful
+    for the ``do_...`` commands that hook into the help system.
+    Adapted from a comp.lang.python posting
+    by Duncan Booth."""
+    def wrapper(*args, **kw):
+        return new_fn(*args, **kw)
+    if old_fn.__doc__:
+        wrapper.__doc__ = strip_indentation(old_fn.__doc__) + additional_text
+    return wrapper
+
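+# Editor's note: an illustrative sketch, not part of IPython. The frame
+# hiding implemented below keys off a `__tracebackhide__` local; a helper
+# can opt out of `where`/`up`/`down` like this:
+#
+#     def _internal_helper():
+#         __tracebackhide__ = True  # frame is skipped while skip_hidden is on
+#         raise RuntimeError("boom")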
+class Pdb(OldPdb):
+    """Modified Pdb class, does not load readline.
+
+    For a standalone version that uses prompt_toolkit, see
+    `IPython.terminal.debugger.TerminalPdb` and
+    `IPython.terminal.debugger.set_trace()`.
+
+
+    This debugger can hide and skip frames that are tagged according to some
+    predicates. See the `skip_predicates` command.
+    """
+
+    default_predicates = {
+        "tbhide": True,
+        "readonly": False,
+        "ipython_internal": True,
+        "debuggerskip": True,
+    }
+
+    def __init__(self, completekey=None, stdin=None, stdout=None, context=5, **kwargs):
+        """Create a new IPython debugger.
+
+        Parameters
+        ----------
+        completekey : default None
+            Passed to pdb.Pdb.
+        stdin : default None
+            Passed to pdb.Pdb.
+        stdout : default None
+            Passed to pdb.Pdb.
+        context : int
+            Number of lines of source code context to show when
+            displaying stacktrace information.
+        **kwargs
+            Passed to pdb.Pdb.
+
+        Notes
+        -----
+        The possibilities are python version dependent, see the python
+        docs for more info.
+        """
+
+        # Parent constructor:
+        try:
+            self.context = int(context)
+            if self.context <= 0:
+                raise ValueError("Context must be a positive integer")
+        except (TypeError, ValueError) as e:
+            raise ValueError("Context must be a positive integer") from e
+
+        # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`.
+        OldPdb.__init__(self, completekey, stdin, stdout, **kwargs)
+
+        # IPython changes...
+        self.shell = get_ipython()
+
+        if self.shell is None:
+            save_main = sys.modules['__main__']
+            # No IPython instance running, we must create one
+            from IPython.terminal.interactiveshell import \
+                TerminalInteractiveShell
+            self.shell = TerminalInteractiveShell.instance()
+            # needed by any code which calls __import__("__main__") after
+            # the debugger was entered. See also #9941.
+            sys.modules["__main__"] = save_main
+
+
+        color_scheme = self.shell.colors
+
+        self.aliases = {}
+
+        # Create color table: we copy the default one from the traceback
+        # module and add a few attributes needed for debugging
+        self.color_scheme_table = exception_colors()
+
+        # shorthands
+        C = coloransi.TermColors
+        cst = self.color_scheme_table
+
+        cst['NoColor'].colors.prompt = C.NoColor
+        cst['NoColor'].colors.breakpoint_enabled = C.NoColor
+        cst['NoColor'].colors.breakpoint_disabled = C.NoColor
+
+        cst['Linux'].colors.prompt = C.Green
+        cst['Linux'].colors.breakpoint_enabled = C.LightRed
+        cst['Linux'].colors.breakpoint_disabled = C.Red
+
+        cst['LightBG'].colors.prompt = C.Blue
+        cst['LightBG'].colors.breakpoint_enabled = C.LightRed
+        cst['LightBG'].colors.breakpoint_disabled = C.Red
+
+        cst['Neutral'].colors.prompt = C.Blue
+        cst['Neutral'].colors.breakpoint_enabled = C.LightRed
+        cst['Neutral'].colors.breakpoint_disabled = C.Red
+
+        # Add a python parser so we can syntax highlight source while
+        # debugging.
+        self.parser = PyColorize.Parser(style=color_scheme)
+        self.set_colors(color_scheme)
+
+        # Set the prompt - the default prompt is '(Pdb)'
+        self.prompt = prompt
+        self.skip_hidden = True
+        self.report_skipped = True
+
+        # list of predicates we use to skip frames
+        self._predicates = self.default_predicates
+
+    def set_colors(self, scheme):
+        """Shorthand access to the color table scheme selector method."""
+        self.color_scheme_table.set_active_scheme(scheme)
+        self.parser.style = scheme
+
+    def set_trace(self, frame=None):
+        if frame is None:
+            frame = sys._getframe().f_back
+        self.initial_frame = frame
+        return super().set_trace(frame)
+
+    def _hidden_predicate(self, frame):
+        """
+        Given a frame, return whether it should be hidden or not by IPython.
+        """
+
+        if self._predicates["readonly"]:
+            fname = frame.f_code.co_filename
+            # We need to check for file existence; interactively defined
+            # functions would otherwise appear as read-only.
+            if os.path.isfile(fname) and not os.access(fname, os.W_OK):
+                return True
+
+        if self._predicates["tbhide"]:
+            if frame in (self.curframe, getattr(self, "initial_frame", None)):
+                return False
+            frame_locals = self._get_frame_locals(frame)
+            if "__tracebackhide__" not in frame_locals:
+                return False
+            return frame_locals["__tracebackhide__"]
+        return False
+
+    def hidden_frames(self, stack):
+        """
+        Given a stack, return a list telling, for each frame, whether it
+        should be skipped.
+
+        This is used in up/down and where to skip frames.
+        """
+        # The f_locals dictionary is updated from the actual frame
+        # locals whenever the .f_locals accessor is called, so we
+        # avoid calling it here to preserve self.curframe_locals.
+        # Furthermore, there is no good reason to hide the current frame.
+        ip_hide = [self._hidden_predicate(s[0]) for s in stack]
+        ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"]
+        if ip_start and self._predicates["ipython_internal"]:
+            ip_hide = [h if i > ip_start[0] else True for (i, h) in enumerate(ip_hide)]
+        return ip_hide
+
+    def interaction(self, frame, traceback):
+        try:
+            OldPdb.interaction(self, frame, traceback)
+        except KeyboardInterrupt:
+            self.stdout.write("\n" + self.shell.get_exception_only())
+
+    def precmd(self, line):
+        """Perform useful escapes on the command before it is executed."""
+
+        if line.endswith("??"):
+            line = "pinfo2 " + line[:-2]
+        elif line.endswith("?"):
+            line = "pinfo " + line[:-1]
+
+        line = super().precmd(line)
+
+        return line
+
+    def new_do_frame(self, arg):
+        OldPdb.do_frame(self, arg)
+
+    def new_do_quit(self, arg):
+
+        if hasattr(self, 'old_all_completions'):
+            self.shell.Completer.all_completions = self.old_all_completions
+
+        return OldPdb.do_quit(self, arg)
+
+    do_q = do_quit = decorate_fn_with_doc(new_do_quit, OldPdb.do_quit)
+
+    def new_do_restart(self, arg):
+        """Restart command. In the context of ipython this is exactly the same
+        thing as 'quit'."""
+        self.msg("Restart doesn't make sense here. Using 'quit' instead.")
+        return self.do_quit(arg)
+
+    def print_stack_trace(self, context=None):
+        Colors = self.color_scheme_table.active_colors
+        ColorsNormal = Colors.Normal
+        if context is None:
+            context = self.context
+        try:
+            context = int(context)
+            if context <= 0:
+                raise ValueError("Context must be a positive integer")
+        except (TypeError, ValueError) as e:
+            raise ValueError("Context must be a positive integer") from e
+        try:
+            skipped = 0
+            for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack):
+                if hidden and self.skip_hidden:
+                    skipped += 1
+                    continue
+                if skipped:
+                    print(
+                        f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n"
+                    )
+                    skipped = 0
+                self.print_stack_entry(frame_lineno, context=context)
+            if skipped:
+                print(
+                    f"{Colors.excName} [... skipping {skipped} hidden frame(s)]{ColorsNormal}\n"
+                )
+        except KeyboardInterrupt:
+            pass
+
+    def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ',
+                          context=None):
+        if context is None:
+            context = self.context
+        try:
+            context = int(context)
+            if context <= 0:
+                raise ValueError("Context must be a positive integer")
+        except (TypeError, ValueError) as e:
+            raise ValueError("Context must be a positive integer") from e
+        print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout)
+
+        # vds: >>
+        frame, lineno = frame_lineno
+        filename = frame.f_code.co_filename
+        self.shell.hooks.synchronize_with_editor(filename, lineno, 0)
+        # vds: <<
+
+    def _get_frame_locals(self, frame):
+        """
+        Accessing f_locals of the current frame resets the namespace, so we
+        want to avoid that, or the following can happen:
+
+        ipdb> foo
+        "old"
+        ipdb> foo = "new"
+        ipdb> foo
+        "new"
+        ipdb> where
+        ipdb> foo
+        "old"
+
+        So if frame is self.curframe we instead return self.curframe_locals.
+        """
+        if frame is self.curframe:
+            return self.curframe_locals
+        else:
+            return frame.f_locals
+    def format_stack_entry(self, frame_lineno, lprefix=': ', context=None):
+        if context is None:
+            context = self.context
+        try:
+            context = int(context)
+            if context <= 0:
+                print("Context must be a positive integer", file=self.stdout)
+        except (TypeError, ValueError):
+            print("Context must be a positive integer", file=self.stdout)
+
+        import reprlib
+
+        ret = []
+
+        Colors = self.color_scheme_table.active_colors
+        ColorsNormal = Colors.Normal
+        tpl_link = "%s%%s%s" % (Colors.filenameEm, ColorsNormal)
+        tpl_call = "%s%%s%s%%s%s" % (Colors.vName, Colors.valEm, ColorsNormal)
+        tpl_line = "%%s%s%%s %s%%s" % (Colors.lineno, ColorsNormal)
+        tpl_line_em = "%%s%s%%s %s%%s%s" % (Colors.linenoEm, Colors.line, ColorsNormal)
+
+        frame, lineno = frame_lineno
+
+        return_value = ''
+        loc_frame = self._get_frame_locals(frame)
+        if "__return__" in loc_frame:
+            rv = loc_frame["__return__"]
+            # return_value += '->'
+            return_value += reprlib.repr(rv) + "\n"
+        ret.append(return_value)
+
+        #s = filename + '(' + `lineno` + ')'
+        filename = self.canonic(frame.f_code.co_filename)
+        link = tpl_link % py3compat.cast_unicode(filename)
+
+        if frame.f_code.co_name:
+            func = frame.f_code.co_name
+        else:
+            func = "<lambda>"
+
+        call = ""
+        if func != "?":
+            if "__args__" in loc_frame:
+                args = reprlib.repr(loc_frame["__args__"])
+            else:
+                args = '()'
+            call = tpl_call % (func, args)
+
+        # The level info should be generated in the same format pdb uses, to
+        # avoid breaking the pdbtrack functionality of python-mode in *emacs.
+        if frame is self.curframe:
+            ret.append('> ')
+        else:
+            ret.append("  ")
+        ret.append("%s(%s)%s\n" % (link, lineno, call))
+
+        start = lineno - 1 - context//2
+        lines = linecache.getlines(filename)
+        start = min(start, len(lines) - context)
+        start = max(start, 0)
+        lines = lines[start : start + context]
+
+        for i, line in enumerate(lines):
+            show_arrow = start + 1 + i == lineno
+            linetpl = (frame is self.curframe or show_arrow) and tpl_line_em or tpl_line
+            ret.append(
+                self.__format_line(
+                    linetpl, filename, start + 1 + i, line, arrow=show_arrow
+                )
+            )
+        return "".join(ret)
+
+    def __format_line(self, tpl_line, filename, lineno, line, arrow=False):
+        bp_mark = ""
+        bp_mark_color = ""
+
+        new_line, err = self.parser.format2(line, 'str')
+        if not err:
+            line = new_line
+
+        bp = None
+        if lineno in self.get_file_breaks(filename):
+            bps = self.get_breaks(filename, lineno)
+            bp = bps[-1]
+
+        if bp:
+            Colors = self.color_scheme_table.active_colors
+            bp_mark = str(bp.number)
+            bp_mark_color = Colors.breakpoint_enabled
+            if not bp.enabled:
+                bp_mark_color = Colors.breakpoint_disabled
+
+        numbers_width = 7
+        if arrow:
+            # This is the line with the error
+            pad = numbers_width - len(str(lineno)) - len(bp_mark)
+            num = '%s%s' % (make_arrow(pad), str(lineno))
+        else:
+            num = '%*s' % (numbers_width - len(bp_mark), str(lineno))
+
+        return tpl_line % (bp_mark_color + bp_mark, num, line)
+
+    def print_list_lines(self, filename, first, last):
+        """The printing (as opposed to the parsing) part of a 'list'
+        command."""
+        try:
+            Colors = self.color_scheme_table.active_colors
+            ColorsNormal = Colors.Normal
+            tpl_line = '%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal)
+            tpl_line_em = '%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, ColorsNormal)
+            src = []
+            if filename == "<string>" and hasattr(self, "_exec_filename"):
+                filename = self._exec_filename
+
+            for lineno in range(first, last+1):
+                line = linecache.getline(filename, lineno)
+                if not line:
+                    break
+
+                if lineno == self.curframe.f_lineno:
+                    line = self.__format_line(
+                        tpl_line_em, filename, lineno, line, arrow=True
+                    )
+                else:
+                    line = self.__format_line(
+                        tpl_line, filename, lineno, line, arrow=False
+                    )
+
+                src.append(line)
+                self.lineno = lineno
+
+            print(''.join(src), file=self.stdout)
+
+        except KeyboardInterrupt:
+            pass
+ """ + if not args.strip(): + print("current predicates:") + for (p, v) in self._predicates.items(): + print(" ", p, ":", v) + return + type_value = args.strip().split(" ") + if len(type_value) != 2: + print( + f"Usage: skip_predicates , with one of {set(self._predicates.keys())}" + ) + return + + type_, value = type_value + if type_ not in self._predicates: + print(f"{type_!r} not in {set(self._predicates.keys())}") + return + if value.lower() not in ("true", "yes", "1", "no", "false", "0"): + print( + f"{value!r} is invalid - use one of ('true', 'yes', '1', 'no', 'false', '0')" + ) + return + + self._predicates[type_] = value.lower() in ("true", "yes", "1") + if not any(self._predicates.values()): + print( + "Warning, all predicates set to False, skip_hidden may not have any effects." + ) + + def do_skip_hidden(self, arg): + """ + Change whether or not we should skip frames with the + __tracebackhide__ attribute. + """ + if not arg.strip(): + print( + f"skip_hidden = {self.skip_hidden}, use 'yes','no', 'true', or 'false' to change." + ) + elif arg.strip().lower() in ("true", "yes"): + self.skip_hidden = True + elif arg.strip().lower() in ("false", "no"): + self.skip_hidden = False + if not any(self._predicates.values()): + print( + "Warning, all predicates set to False, skip_hidden may not have any effects." + ) + + def do_list(self, arg): + """Print lines of code from the current stack frame + """ + self.lastcmd = 'list' + last = None + if arg: + try: + x = eval(arg, {}, {}) + if type(x) == type(()): + first, last = x + first = int(first) + last = int(last) + if last < first: + # Assume it's a count + last = first + last + else: + first = max(1, int(x) - 5) + except: + print('*** Error in argument:', repr(arg), file=self.stdout) + return + elif self.lineno is None: + first = max(1, self.curframe.f_lineno - 5) + else: + first = self.lineno + 1 + if last is None: + last = first + 10 + self.print_list_lines(self.curframe.f_code.co_filename, first, last) + + # vds: >> + lineno = first + filename = self.curframe.f_code.co_filename + self.shell.hooks.synchronize_with_editor(filename, lineno, 0) + # vds: << + + do_l = do_list + + def getsourcelines(self, obj): + lines, lineno = inspect.findsource(obj) + if inspect.isframe(obj) and obj.f_globals is self._get_frame_locals(obj): + # must be a module frame: do not try to cut a block out of it + return lines, 1 + elif inspect.ismodule(obj): + return lines, 1 + return inspect.getblock(lines[lineno:]), lineno+1 + + def do_longlist(self, arg): + """Print lines of code from the current stack frame. + + Shows more lines than 'list' does. + """ + self.lastcmd = 'longlist' + try: + lines, lineno = self.getsourcelines(self.curframe) + except OSError as err: + self.error(err) + return + last = lineno + len(lines) + self.print_list_lines(self.curframe.f_code.co_filename, lineno, last) + do_ll = do_longlist + + def do_debug(self, arg): + """debug code + Enter a recursive debugger that steps through the code + argument (which is an arbitrary expression or statement to be + executed in the current environment). 
+ """ + trace_function = sys.gettrace() + sys.settrace(None) + globals = self.curframe.f_globals + locals = self.curframe_locals + p = self.__class__(completekey=self.completekey, + stdin=self.stdin, stdout=self.stdout) + p.use_rawinput = self.use_rawinput + p.prompt = "(%s) " % self.prompt.strip() + self.message("ENTERING RECURSIVE DEBUGGER") + sys.call_tracing(p.run, (arg, globals, locals)) + self.message("LEAVING RECURSIVE DEBUGGER") + sys.settrace(trace_function) + self.lastcmd = p.lastcmd + + def do_pdef(self, arg): + """Print the call signature for any callable object. + + The debugger interface to %pdef""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pdef")(arg, namespaces=namespaces) + + def do_pdoc(self, arg): + """Print the docstring for an object. + + The debugger interface to %pdoc.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pdoc")(arg, namespaces=namespaces) + + def do_pfile(self, arg): + """Print (or run through pager) the file where an object is defined. + + The debugger interface to %pfile. + """ + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pfile")(arg, namespaces=namespaces) + + def do_pinfo(self, arg): + """Provide detailed information about an object. + + The debugger interface to %pinfo, i.e., obj?.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pinfo")(arg, namespaces=namespaces) + + def do_pinfo2(self, arg): + """Provide extra detailed information about an object. + + The debugger interface to %pinfo2, i.e., obj??.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("pinfo2")(arg, namespaces=namespaces) + + def do_psource(self, arg): + """Print (or run through pager) the source code for an object.""" + namespaces = [ + ("Locals", self.curframe_locals), + ("Globals", self.curframe.f_globals), + ] + self.shell.find_line_magic("psource")(arg, namespaces=namespaces) + + def do_where(self, arg): + """w(here) + Print a stack trace, with the most recent frame at the bottom. + An arrow indicates the "current frame", which determines the + context of most commands. 'bt' is an alias for this command. + + Take a number as argument as an (optional) number of context line to + print""" + if arg: + try: + context = int(arg) + except ValueError as err: + self.error(err) + return + self.print_stack_trace(context) + else: + self.print_stack_trace() + + do_w = do_where + + def break_anywhere(self, frame): + """ + _stop_in_decorator_internals is overly restrictive, as we may still want + to trace function calls, so we need to also update break_anywhere so + that is we don't `stop_here`, because of debugger skip, we may still + stop at any point inside the function + + """ + + sup = super().break_anywhere(frame) + if sup: + return sup + if self._predicates["debuggerskip"]: + if DEBUGGERSKIP in frame.f_code.co_varnames: + return True + if frame.f_back and self._get_frame_locals(frame.f_back).get(DEBUGGERSKIP): + return True + return False + + def _is_in_decorator_internal_and_should_skip(self, frame): + """ + Utility to tell us whether we are in a decorator internal and should stop. 
+ + """ + + # if we are disabled don't skip + if not self._predicates["debuggerskip"]: + return False + + # if frame is tagged, skip by default. + if DEBUGGERSKIP in frame.f_code.co_varnames: + return True + + # if one of the parent frame value set to True skip as well. + + cframe = frame + while getattr(cframe, "f_back", None): + cframe = cframe.f_back + if self._get_frame_locals(cframe).get(DEBUGGERSKIP): + return True + + return False + + def stop_here(self, frame): + + if self._is_in_decorator_internal_and_should_skip(frame) is True: + return False + + hidden = False + if self.skip_hidden: + hidden = self._hidden_predicate(frame) + if hidden: + if self.report_skipped: + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + print( + f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n" + ) + return super().stop_here(frame) + + def do_up(self, arg): + """u(p) [count] + Move the current frame count (default one) levels up in the + stack trace (to an older frame). + + Will skip hidden frames. + """ + # modified version of upstream that skips + # frames with __tracebackhide__ + if self.curindex == 0: + self.error("Oldest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + skipped = 0 + if count < 0: + _newframe = 0 + else: + counter = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex - 1, -1, -1): + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + # if no break occurred. + self.error( + "all frames above hidden, use `skip_hidden False` to get get into those." + ) + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + _newframe = i + self._select_frame(_newframe) + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + + def do_down(self, arg): + """d(own) [count] + Move the current frame count (default one) levels down in the + stack trace (to a newer frame). + + Will skip hidden frames. + """ + if self.curindex + 1 == len(self.stack): + self.error("Newest frame") + return + try: + count = int(arg or 1) + except ValueError: + self.error("Invalid frame count (%s)" % arg) + return + if count < 0: + _newframe = len(self.stack) - 1 + else: + counter = 0 + skipped = 0 + hidden_frames = self.hidden_frames(self.stack) + for i in range(self.curindex + 1, len(self.stack)): + if hidden_frames[i] and self.skip_hidden: + skipped += 1 + continue + counter += 1 + if counter >= count: + break + else: + self.error( + "all frames below hidden, use `skip_hidden False` to get get into those." + ) + return + + Colors = self.color_scheme_table.active_colors + ColorsNormal = Colors.Normal + if skipped: + print( + f"{Colors.excName} [... skipped {skipped} hidden frame(s)]{ColorsNormal}\n" + ) + _newframe = i + + self._select_frame(_newframe) + + do_d = do_down + do_u = do_up + + def do_context(self, context): + """context number_of_lines + Set the number of lines of source code to show when displaying + stacktrace information. 
+ """ + try: + new_context = int(context) + if new_context <= 0: + raise ValueError() + self.context = new_context + except ValueError: + self.error("The 'context' command requires a positive integer argument.") + + +class InterruptiblePdb(Pdb): + """Version of debugger where KeyboardInterrupt exits the debugger altogether.""" + + def cmdloop(self, intro=None): + """Wrap cmdloop() such that KeyboardInterrupt stops the debugger.""" + try: + return OldPdb.cmdloop(self, intro=intro) + except KeyboardInterrupt: + self.stop_here = lambda frame: False + self.do_quit("") + sys.settrace(None) + self.quitting = False + raise + + def _cmdloop(self): + while True: + try: + # keyboard interrupts allow for an easy way to cancel + # the current command, so allow them during interactive input + self.allow_kbdint = True + self.cmdloop() + self.allow_kbdint = False + break + except KeyboardInterrupt: + self.message('--KeyboardInterrupt--') + raise + + +def set_trace(frame=None): + """ + Start debugging from `frame`. + + If frame is not specified, debugging starts from caller's frame. + """ + Pdb().set_trace(frame or sys._getframe().f_back) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/display.py b/myenv/lib/python3.10/site-packages/IPython/core/display.py new file mode 100644 index 000000000..23d8636b5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/display.py @@ -0,0 +1,1290 @@ +# -*- coding: utf-8 -*- +"""Top-level display functions for displaying object in different formats.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from binascii import b2a_base64, hexlify +import html +import json +import mimetypes +import os +import struct +import warnings +from copy import deepcopy +from os.path import splitext +from pathlib import Path, PurePath + +from IPython.utils.py3compat import cast_unicode +from IPython.testing.skipdoctest import skip_doctest +from . 
import display_functions + + +__all__ = ['display_pretty', 'display_html', 'display_markdown', + 'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', + 'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', + 'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', + 'GeoJSON', 'Javascript', 'Image', 'set_matplotlib_formats', + 'set_matplotlib_close', + 'Video'] + +_deprecated_names = ["display", "clear_output", "publish_display_data", "update_display", "DisplayHandle"] + +__all__ = __all__ + _deprecated_names + + +# ----- warn to import from IPython.display ----- + +from warnings import warn + + +def __getattr__(name): + if name in _deprecated_names: + warn(f"Importing {name} from IPython.core.display is deprecated since IPython 7.14, please import from IPython display", DeprecationWarning, stacklevel=2) + return getattr(display_functions, name) + + if name in globals().keys(): + return globals()[name] + else: + raise AttributeError(f"module {__name__} has no attribute {name}") + + +#----------------------------------------------------------------------------- +# utility functions +#----------------------------------------------------------------------------- + +def _safe_exists(path): + """Check path, but don't let exceptions raise""" + try: + return os.path.exists(path) + except Exception: + return False + + +def _display_mimetype(mimetype, objs, raw=False, metadata=None): + """internal implementation of all display_foo methods + + Parameters + ---------- + mimetype : str + The mimetype to be published (e.g. 'image/png') + *objs : object + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + if metadata: + metadata = {mimetype: metadata} + if raw: + # turn list of pngdata into list of { 'image/png': pngdata } + objs = [ {mimetype: obj} for obj in objs ] + display_functions.display(*objs, raw=raw, metadata=metadata, include=[mimetype]) + +#----------------------------------------------------------------------------- +# Main functions +#----------------------------------------------------------------------------- + + +def display_pretty(*objs, **kwargs): + """Display the pretty (default) representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw text data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/plain', objs, **kwargs) + + +def display_html(*objs, **kwargs): + """Display the HTML representation of an object. + + Note: If raw=False and the object does not have a HTML + representation, no HTML will be shown. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw HTML data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/html', objs, **kwargs) + + +def display_markdown(*objs, **kwargs): + """Displays the Markdown representation of an object. 
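+
+    For example, ``display_markdown("# Title", raw=True)`` publishes the
+    raw markdown string for the frontend to render.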
+ + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw markdown data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + + _display_mimetype('text/markdown', objs, **kwargs) + + +def display_svg(*objs, **kwargs): + """Display the SVG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw svg data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/svg+xml', objs, **kwargs) + + +def display_png(*objs, **kwargs): + """Display the PNG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw png data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/png', objs, **kwargs) + + +def display_jpeg(*objs, **kwargs): + """Display the JPEG representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw JPEG data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('image/jpeg', objs, **kwargs) + + +def display_latex(*objs, **kwargs): + """Display the LaTeX representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw latex data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('text/latex', objs, **kwargs) + + +def display_json(*objs, **kwargs): + """Display the JSON representation of an object. + + Note that not many frontends support displaying JSON. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw json data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/json', objs, **kwargs) + + +def display_javascript(*objs, **kwargs): + """Display the Javascript representation of an object. + + Parameters + ---------- + *objs : object + The Python objects to display, or if raw=True raw javascript data to + display. + raw : bool + Are the data objects raw data or Python objects that need to be + formatted before display? [default: False] + metadata : dict (optional) + Metadata to be associated with the specific mimetype output. + """ + _display_mimetype('application/javascript', objs, **kwargs) + + +def display_pdf(*objs, **kwargs): + """Display the PDF representation of an object. 
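+
+    For example, ``display_pdf(data, raw=True)`` publishes already-loaded
+    PDF bytes (``data`` here being whatever PDF payload you hold).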
+
+    Parameters
+    ----------
+    *objs : object
+        The Python objects to display, or if raw=True raw PDF data to
+        display.
+    raw : bool
+        Are the data objects raw data or Python objects that need to be
+        formatted before display? [default: False]
+    metadata : dict (optional)
+        Metadata to be associated with the specific mimetype output.
+    """
+    _display_mimetype('application/pdf', objs, **kwargs)
+
+
+#-----------------------------------------------------------------------------
+# Smart classes
+#-----------------------------------------------------------------------------
+
+
+class DisplayObject(object):
+    """An object that wraps data to be displayed."""
+
+    _read_flags = 'r'
+    _show_mem_addr = False
+    metadata = None
+
+    def __init__(self, data=None, url=None, filename=None, metadata=None):
+        """Create a display object given raw data.
+
+        When this object is returned by an expression or passed to the
+        display function, it will result in the data being displayed
+        in the frontend. The MIME type of the data should match the
+        subclasses used, so the Png subclass should be used for 'image/png'
+        data. If the data is a URL, the data will first be downloaded
+        and then displayed.
+
+        Parameters
+        ----------
+        data : unicode, str or bytes
+            The raw data or a URL or file to load the data from
+        url : unicode
+            A URL to download the data from.
+        filename : unicode
+            Path to a local file to load the data from.
+        metadata : dict
+            Dict of metadata associated with the object when displayed
+        """
+        if isinstance(data, (Path, PurePath)):
+            data = str(data)
+
+        if data is not None and isinstance(data, str):
+            if data.startswith('http') and url is None:
+                url = data
+                filename = None
+                data = None
+            elif _safe_exists(data) and filename is None:
+                url = None
+                filename = data
+                data = None
+
+        self.url = url
+        self.filename = filename
+        # because of @data.setter methods in
+        # subclasses ensure url and filename are set
+        # before assigning to self.data
+        self.data = data
+
+        if metadata is not None:
+            self.metadata = metadata
+        elif self.metadata is None:
+            self.metadata = {}
+
+        self.reload()
+        self._check_data()
+
+    def __repr__(self):
+        if not self._show_mem_addr:
+            cls = self.__class__
+            r = "<%s.%s object>" % (cls.__module__, cls.__name__)
+        else:
+            r = super(DisplayObject, self).__repr__()
+        return r
+
+    def _check_data(self):
+        """Override in subclasses if there's something to check."""
+        pass
+
+    def _data_and_metadata(self):
+        """shortcut for returning metadata with shape information, if defined"""
+        if self.metadata:
+            return self.data, deepcopy(self.metadata)
+        else:
+            return self.data
+
+    def reload(self):
+        """Reload the raw data from file or URL."""
+        if self.filename is not None:
+            encoding = None if "b" in self._read_flags else "utf-8"
+            with open(self.filename, self._read_flags, encoding=encoding) as f:
+                self.data = f.read()
+        elif self.url is not None:
+            # Deferred import
+            from urllib.request import urlopen
+            response = urlopen(self.url)
+            data = response.read()
+            # extract encoding from header, if there is one:
+            encoding = None
+            if 'content-type' in response.headers:
+                for sub in response.headers['content-type'].split(';'):
+                    sub = sub.strip()
+                    if sub.startswith('charset'):
+                        encoding = sub.split('=')[-1].strip()
+                        break
+            if 'content-encoding' in response.headers:
+                # TODO: do deflate?
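+                # (Illustrative note, not upstream code: a raw deflate body
+                # could be handled much like gzip below, e.g.
+                #     import zlib
+                #     data = zlib.decompress(data, -zlib.MAX_WBITS)
+                # but the TODO above is left unresolved here.)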
+                if 'gzip' in response.headers['content-encoding']:
+                    import gzip
+                    from io import BytesIO
+
+                    # assume utf-8 if encoding is not specified
+                    with gzip.open(
+                        BytesIO(data), "rt", encoding=encoding or "utf-8"
+                    ) as fp:
+                        encoding = None
+                        data = fp.read()
+
+            # decode data, if an encoding was specified
+            # We only touch self.data once since
+            # subclasses such as SVG have @data.setter methods
+            # that transform self.data into ... well svg.
+            if encoding:
+                self.data = data.decode(encoding, 'replace')
+            else:
+                self.data = data
+
+
+class TextDisplayObject(DisplayObject):
+    """Create a text display object given raw data.
+
+    Parameters
+    ----------
+    data : str or unicode
+        The raw data or a URL or file to load the data from.
+    url : unicode
+        A URL to download the data from.
+    filename : unicode
+        Path to a local file to load the data from.
+    metadata : dict
+        Dict of metadata associated with the object when displayed
+    """
+    def _check_data(self):
+        if self.data is not None and not isinstance(self.data, str):
+            raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
+
+class Pretty(TextDisplayObject):
+
+    def _repr_pretty_(self, pp, cycle):
+        return pp.text(self.data)
+
+
+class HTML(TextDisplayObject):
+
+    def __init__(self, data=None, url=None, filename=None, metadata=None):
+        def warn():
+            if not data:
+                return False
+
+            #
+            # Avoid calling lower() on the entire data, because it could be a
+            # long string and we're only interested in its beginning and end.
+            #
+            prefix = data[:10].lower()
+            suffix = data[-10:].lower()
+            return prefix.startswith("<iframe ") and suffix.endswith("</iframe>")
+
+        if warn():
+            warnings.warn("Consider using "
+                          "IPython.display.IFrame instead")
+        super(HTML, self).__init__(data=data, url=url, filename=filename, metadata=metadata)
+
+
+@mock.patch('warnings.warn')
+def test_encourage_iframe_over_html(m_warn):
+    display.HTML('<html><body></body></html>')
+    m_warn.assert_not_called()
+
+    display.HTML('<iframe src="http://a.com"></iframe>')
+    m_warn.assert_called_with('Consider using IPython.display.IFrame instead')
+
+    m_warn.reset_mock()
+    display.HTML('<IFRAME SRC="http://a.com"></IFRAME>')
+    m_warn.assert_called_with('Consider using IPython.display.IFrame instead')
+
+def test_progress():
+    p = display.ProgressBar(10)
+    assert "0/10" in repr(p)
+    p.html_width = "100%"
+    p.progress = 5
+    assert (
+        p._repr_html_()
+        == "<progress style='width:100%' max='10' value='5'></progress>"
+    )
+
+
+def test_progress_iter():
+    with capture_output(display=False) as captured:
+        for i in display.ProgressBar(5):
+            out = captured.stdout
+            assert "{0}/5".format(i) in out
+        out = captured.stdout
+        assert "5/5" in out
+
+
+def test_json():
+    d = {'a': 5}
+    lis = [d]
+    metadata = [
+        {'expanded': False, 'root': 'root'},
+        {'expanded': True, 'root': 'root'},
+        {'expanded': False, 'root': 'custom'},
+        {'expanded': True, 'root': 'custom'},
+    ]
+    json_objs = [
+        display.JSON(d),
+        display.JSON(d, expanded=True),
+        display.JSON(d, root='custom'),
+        display.JSON(d, expanded=True, root='custom'),
+    ]
+    for j, md in zip(json_objs, metadata):
+        assert j._repr_json_() == (d, md)
+
+    with warnings.catch_warnings(record=True) as w:
+        warnings.simplefilter("always")
+        j = display.JSON(json.dumps(d))
+        assert len(w) == 1
+        assert j._repr_json_() == (d, metadata[0])
+
+    json_objs = [
+        display.JSON(lis),
+        display.JSON(lis, expanded=True),
+        display.JSON(lis, root='custom'),
+        display.JSON(lis, expanded=True, root='custom'),
+    ]
+    for j, md in zip(json_objs, metadata):
+        assert j._repr_json_() == (lis, md)
+
+    with warnings.catch_warnings(record=True) as w:
+        warnings.simplefilter("always")
+        j = display.JSON(json.dumps(lis))
+        assert len(w) == 1
+        assert j._repr_json_() == (lis, metadata[0])
+
+
+def test_video_embedding():
+    """use a tempfile, with dummy-data, to ensure that video embedding doesn't crash"""
+    v = display.Video("http://ignored")
+    assert not v.embed
+    html = v._repr_html_()
+    assert 'src="data:' not in html
assert 'src="http://ignored"' in html + + with pytest.raises(ValueError): + v = display.Video(b'abc') + + with NamedFileInTemporaryDirectory('test.mp4') as f: + f.write(b'abc') + f.close() + + v = display.Video(f.name) + assert not v.embed + html = v._repr_html_() + assert 'src="data:' not in html + + v = display.Video(f.name, embed=True) + html = v._repr_html_() + assert 'src="data:video/mp4;base64,YWJj"' in html + + v = display.Video(f.name, embed=True, mimetype='video/other') + html = v._repr_html_() + assert 'src="data:video/other;base64,YWJj"' in html + + v = display.Video(b'abc', embed=True, mimetype='video/mp4') + html = v._repr_html_() + assert 'src="data:video/mp4;base64,YWJj"' in html + + v = display.Video(u'YWJj', embed=True, mimetype='video/xyz') + html = v._repr_html_() + assert 'src="data:video/xyz;base64,YWJj"' in html + +def test_html_metadata(): + s = "

<h1>Test</h1>
" + h = display.HTML(s, metadata={"isolated": True}) + assert h._repr_html_() == (s, {"isolated": True}) + + +def test_display_id(): + ip = get_ipython() + with mock.patch.object(ip.display_pub, 'publish') as pub: + handle = display.display('x') + assert handle is None + handle = display.display('y', display_id='secret') + assert isinstance(handle, display.DisplayHandle) + handle2 = display.display('z', display_id=True) + assert isinstance(handle2, display.DisplayHandle) + assert handle.display_id != handle2.display_id + + assert pub.call_count == 3 + args, kwargs = pub.call_args_list[0] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('x') + }, + 'metadata': {}, + } + args, kwargs = pub.call_args_list[1] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('y') + }, + 'metadata': {}, + 'transient': { + 'display_id': handle.display_id, + }, + } + args, kwargs = pub.call_args_list[2] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('z') + }, + 'metadata': {}, + 'transient': { + 'display_id': handle2.display_id, + }, + } + + +def test_update_display(): + ip = get_ipython() + with mock.patch.object(ip.display_pub, 'publish') as pub: + with pytest.raises(TypeError): + display.update_display('x') + display.update_display('x', display_id='1') + display.update_display('y', display_id='2') + args, kwargs = pub.call_args_list[0] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('x') + }, + 'metadata': {}, + 'transient': { + 'display_id': '1', + }, + 'update': True, + } + args, kwargs = pub.call_args_list[1] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('y') + }, + 'metadata': {}, + 'transient': { + 'display_id': '2', + }, + 'update': True, + } + + +def test_display_handle(): + ip = get_ipython() + handle = display.DisplayHandle() + assert isinstance(handle.display_id, str) + handle = display.DisplayHandle("my-id") + assert handle.display_id == "my-id" + with mock.patch.object(ip.display_pub, "publish") as pub: + handle.display("x") + handle.update("y") + + args, kwargs = pub.call_args_list[0] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('x') + }, + 'metadata': {}, + 'transient': { + 'display_id': handle.display_id, + } + } + args, kwargs = pub.call_args_list[1] + assert args == () + assert kwargs == { + 'data': { + 'text/plain': repr('y') + }, + 'metadata': {}, + 'transient': { + 'display_id': handle.display_id, + }, + 'update': True, + } + + +def test_image_alt_tag(): + """Simple test for display.Image(args, alt=x,)""" + thisurl = "http://example.com/image.png" + img = display.Image(url=thisurl, alt="an image") + assert 'an image' % (thisurl) == img._repr_html_() + img = display.Image(url=thisurl, unconfined=True, alt="an image") + assert ( + 'an image' % (thisurl) + == img._repr_html_() + ) + img = display.Image(url=thisurl, alt='>"& <') + assert '>"& <' % (thisurl) == img._repr_html_() + + img = display.Image(url=thisurl, metadata={"alt": "an image"}) + assert img.alt == "an image" + here = os.path.dirname(__file__) + img = display.Image(os.path.join(here, "2x2.png"), alt="an image") + assert img.alt == "an image" + _, md = img._repr_png_() + assert md["alt"] == "an image" + + +def test_image_bad_filename_raises_proper_exception(): + with pytest.raises(FileNotFoundError): + display.Image("/this/file/does/not/exist/")._repr_png_() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_displayhook.py 
b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_displayhook.py new file mode 100644 index 000000000..22899f3dd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_displayhook.py @@ -0,0 +1,112 @@ +import sys +from IPython.testing.tools import AssertPrints, AssertNotPrints +from IPython.core.displayhook import CapturingDisplayHook +from IPython.utils.capture import CapturedIO + +def test_output_displayed(): + """Checking to make sure that output is displayed""" + + with AssertPrints('2'): + ip.run_cell('1+1', store_history=True) + + with AssertPrints('2'): + ip.run_cell('1+1 # comment with a semicolon;', store_history=True) + + with AssertPrints('2'): + ip.run_cell('1+1\n#commented_out_function();', store_history=True) + + +def test_output_quiet(): + """Checking to make sure that output is quiet""" + + with AssertNotPrints('2'): + ip.run_cell('1+1;', store_history=True) + + with AssertNotPrints('2'): + ip.run_cell('1+1; # comment with a semicolon', store_history=True) + + with AssertNotPrints('2'): + ip.run_cell('1+1;\n#commented_out_function()', store_history=True) + +def test_underscore_no_overwrite_user(): + ip.run_cell('_ = 42', store_history=True) + ip.run_cell('1+1', store_history=True) + + with AssertPrints('42'): + ip.run_cell('print(_)', store_history=True) + + ip.run_cell('del _', store_history=True) + ip.run_cell('6+6', store_history=True) + with AssertPrints('12'): + ip.run_cell('_', store_history=True) + + +def test_underscore_no_overwrite_builtins(): + ip.run_cell("import gettext ; gettext.install('foo')", store_history=True) + ip.run_cell('3+3', store_history=True) + + with AssertPrints('gettext'): + ip.run_cell('print(_)', store_history=True) + + ip.run_cell('_ = "userset"', store_history=True) + + with AssertPrints('userset'): + ip.run_cell('print(_)', store_history=True) + ip.run_cell('import builtins; del builtins._') + + +def test_interactivehooks_ast_modes(): + """ + Test that ast nodes can be triggered with different modes + """ + saved_mode = ip.ast_node_interactivity + ip.ast_node_interactivity = 'last_expr_or_assign' + + try: + with AssertPrints('2'): + ip.run_cell('a = 1+1', store_history=True) + + with AssertPrints('9'): + ip.run_cell('b = 1+8 # comment with a semicolon;', store_history=False) + + with AssertPrints('7'): + ip.run_cell('c = 1+6\n#commented_out_function();', store_history=True) + + ip.run_cell('d = 11', store_history=True) + with AssertPrints('12'): + ip.run_cell('d += 1', store_history=True) + + with AssertNotPrints('42'): + ip.run_cell('(u,v) = (41+1, 43-1)') + + finally: + ip.ast_node_interactivity = saved_mode + +def test_interactivehooks_ast_modes_semi_suppress(): + """ + Test that ast nodes can be triggered with different modes and suppressed + by semicolon + """ + saved_mode = ip.ast_node_interactivity + ip.ast_node_interactivity = 'last_expr_or_assign' + + try: + with AssertNotPrints('2'): + ip.run_cell('x = 1+1;', store_history=True) + + with AssertNotPrints('7'): + ip.run_cell('y = 1+6; # comment with a semicolon', store_history=True) + + with AssertNotPrints('9'): + ip.run_cell('z = 1+8;\n#commented_out_function()', store_history=True) + + finally: + ip.ast_node_interactivity = saved_mode + +def test_capture_display_hook_format(): + """Tests that the capture display hook conforms to the CapturedIO output format""" + hook = CapturingDisplayHook(ip) + hook({"foo": "bar"}) + captured = CapturedIO(sys.stdout, sys.stderr, hook.outputs) + # Should not raise with RichOutput transformation error + 
captured.outputs diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_events.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_events.py new file mode 100644 index 000000000..cc9bf40fd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_events.py @@ -0,0 +1,91 @@ +import unittest +from unittest.mock import Mock + +from IPython.core import events +import IPython.testing.tools as tt + + +@events._define_event +def ping_received(): + pass + + +@events._define_event +def event_with_argument(argument): + pass + + +class CallbackTests(unittest.TestCase): + def setUp(self): + self.em = events.EventManager(get_ipython(), + {'ping_received': ping_received, + 'event_with_argument': event_with_argument}) + + def test_register_unregister(self): + cb = Mock() + + self.em.register('ping_received', cb) + self.em.trigger('ping_received') + self.assertEqual(cb.call_count, 1) + + self.em.unregister('ping_received', cb) + self.em.trigger('ping_received') + self.assertEqual(cb.call_count, 1) + + def test_bare_function_missed_unregister(self): + def cb1(): + ... + + def cb2(): + ... + + self.em.register("ping_received", cb1) + self.assertRaises(ValueError, self.em.unregister, "ping_received", cb2) + self.em.unregister("ping_received", cb1) + + def test_cb_error(self): + cb = Mock(side_effect=ValueError) + self.em.register('ping_received', cb) + with tt.AssertPrints("Error in callback"): + self.em.trigger('ping_received') + + def test_cb_keyboard_interrupt(self): + cb = Mock(side_effect=KeyboardInterrupt) + self.em.register('ping_received', cb) + with tt.AssertPrints("Error in callback"): + self.em.trigger('ping_received') + + def test_unregister_during_callback(self): + invoked = [False] * 3 + + def func1(*_): + invoked[0] = True + self.em.unregister('ping_received', func1) + self.em.register('ping_received', func3) + + def func2(*_): + invoked[1] = True + self.em.unregister('ping_received', func2) + + def func3(*_): + invoked[2] = True + + self.em.register('ping_received', func1) + self.em.register('ping_received', func2) + + self.em.trigger('ping_received') + self.assertEqual([True, True, False], invoked) + self.assertEqual([func3], self.em.callbacks['ping_received']) + + def test_ignore_event_arguments_if_no_argument_required(self): + call_count = [0] + def event_with_no_argument(): + call_count[0] += 1 + + self.em.register('event_with_argument', event_with_no_argument) + self.em.trigger('event_with_argument', 'the argument') + self.assertEqual(call_count[0], 1) + + self.em.unregister('event_with_argument', event_with_no_argument) + self.em.trigger('ping_received') + self.assertEqual(call_count[0], 1) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_extension.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_extension.py new file mode 100644 index 000000000..24ecf7e97 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_extension.py @@ -0,0 +1,95 @@ +import os.path + +from tempfile import TemporaryDirectory + +import IPython.testing.tools as tt +from IPython.utils.syspathcontext import prepended_to_syspath + +ext1_content = """ +def load_ipython_extension(ip): + print("Running ext1 load") + +def unload_ipython_extension(ip): + print("Running ext1 unload") +""" + +ext2_content = """ +def load_ipython_extension(ip): + print("Running ext2 load") +""" + +ext3_content = """ +def load_ipython_extension(ip): + ip2 = get_ipython() + print(ip is ip2) +""" + +def test_extension_loading(): + em = 
get_ipython().extension_manager + with TemporaryDirectory() as td: + ext1 = os.path.join(td, "ext1.py") + with open(ext1, "w", encoding="utf-8") as f: + f.write(ext1_content) + + ext2 = os.path.join(td, "ext2.py") + with open(ext2, "w", encoding="utf-8") as f: + f.write(ext2_content) + + with prepended_to_syspath(td): + assert 'ext1' not in em.loaded + assert 'ext2' not in em.loaded + + # Load extension + with tt.AssertPrints("Running ext1 load"): + assert em.load_extension('ext1') is None + assert 'ext1' in em.loaded + + # Should refuse to load it again + with tt.AssertNotPrints("Running ext1 load"): + assert em.load_extension('ext1') == 'already loaded' + + # Reload + with tt.AssertPrints("Running ext1 unload"): + with tt.AssertPrints("Running ext1 load", suppress=False): + em.reload_extension('ext1') + + # Unload + with tt.AssertPrints("Running ext1 unload"): + assert em.unload_extension('ext1') is None + + # Can't unload again + with tt.AssertNotPrints("Running ext1 unload"): + assert em.unload_extension('ext1') == 'not loaded' + assert em.unload_extension('ext2') == 'not loaded' + + # Load extension 2 + with tt.AssertPrints("Running ext2 load"): + assert em.load_extension('ext2') is None + + # Can't unload this + assert em.unload_extension('ext2') == 'no unload function' + + # But can reload it + with tt.AssertPrints("Running ext2 load"): + em.reload_extension('ext2') + + +def test_extension_builtins(): + em = get_ipython().extension_manager + with TemporaryDirectory() as td: + ext3 = os.path.join(td, "ext3.py") + with open(ext3, "w", encoding="utf-8") as f: + f.write(ext3_content) + + assert 'ext3' not in em.loaded + + with prepended_to_syspath(td): + # Load extension + with tt.AssertPrints("True"): + assert em.load_extension('ext3') is None + assert 'ext3' in em.loaded + + +def test_non_extension(): + em = get_ipython().extension_manager + assert em.load_extension("sys") == "no load function" diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_formatters.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_formatters.py new file mode 100644 index 000000000..c642befac --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_formatters.py @@ -0,0 +1,531 @@ +"""Tests for the Formatters.""" + +from math import pi + +try: + import numpy +except: + numpy = None +import pytest + +from IPython import get_ipython +from traitlets.config import Config +from IPython.core.formatters import ( + PlainTextFormatter, HTMLFormatter, PDFFormatter, _mod_name_key, + DisplayFormatter, JSONFormatter, +) +from IPython.utils.io import capture_output + +class A(object): + def __repr__(self): + return 'A()' + +class B(A): + def __repr__(self): + return 'B()' + +class C: + pass + +class BadRepr(object): + def __repr__(self): + raise ValueError("bad repr") + +class BadPretty(object): + _repr_pretty_ = None + +class GoodPretty(object): + def _repr_pretty_(self, pp, cycle): + pp.text('foo') + + def __repr__(self): + return 'GoodPretty()' + +def foo_printer(obj, pp, cycle): + pp.text('foo') + +def test_pretty(): + f = PlainTextFormatter() + f.for_type(A, foo_printer) + assert f(A()) == "foo" + assert f(B()) == "B()" + assert f(GoodPretty()) == "foo" + # Just don't raise an exception for the following: + f(BadPretty()) + + f.pprint = False + assert f(A()) == "A()" + assert f(B()) == "B()" + assert f(GoodPretty()) == "GoodPretty()" + + +def test_deferred(): + f = PlainTextFormatter() + +def test_precision(): + """test various values for float_precision.""" + f = 
PlainTextFormatter() + assert f(pi) == repr(pi) + f.float_precision = 0 + if numpy: + po = numpy.get_printoptions() + assert po["precision"] == 0 + assert f(pi) == "3" + f.float_precision = 2 + if numpy: + po = numpy.get_printoptions() + assert po["precision"] == 2 + assert f(pi) == "3.14" + f.float_precision = "%g" + if numpy: + po = numpy.get_printoptions() + assert po["precision"] == 2 + assert f(pi) == "3.14159" + f.float_precision = "%e" + assert f(pi) == "3.141593e+00" + f.float_precision = "" + if numpy: + po = numpy.get_printoptions() + assert po["precision"] == 8 + assert f(pi) == repr(pi) + + +def test_bad_precision(): + """test various invalid values for float_precision.""" + f = PlainTextFormatter() + def set_fp(p): + f.float_precision = p + + pytest.raises(ValueError, set_fp, "%") + pytest.raises(ValueError, set_fp, "%.3f%i") + pytest.raises(ValueError, set_fp, "foo") + pytest.raises(ValueError, set_fp, -1) + +def test_for_type(): + f = PlainTextFormatter() + + # initial return, None + assert f.for_type(C, foo_printer) is None + # no func queries + assert f.for_type(C) is foo_printer + # shouldn't change anything + assert f.for_type(C) is foo_printer + # None should do the same + assert f.for_type(C, None) is foo_printer + assert f.for_type(C, None) is foo_printer + +def test_for_type_string(): + f = PlainTextFormatter() + + type_str = '%s.%s' % (C.__module__, 'C') + + # initial return, None + assert f.for_type(type_str, foo_printer) is None + # no func queries + assert f.for_type(type_str) is foo_printer + assert _mod_name_key(C) in f.deferred_printers + assert f.for_type(C) is foo_printer + assert _mod_name_key(C) not in f.deferred_printers + assert C in f.type_printers + +def test_for_type_by_name(): + f = PlainTextFormatter() + + mod = C.__module__ + + # initial return, None + assert f.for_type_by_name(mod, "C", foo_printer) is None + # no func queries + assert f.for_type_by_name(mod, "C") is foo_printer + # shouldn't change anything + assert f.for_type_by_name(mod, "C") is foo_printer + # None should do the same + assert f.for_type_by_name(mod, "C", None) is foo_printer + assert f.for_type_by_name(mod, "C", None) is foo_printer + + +def test_lookup(): + f = PlainTextFormatter() + + f.for_type(C, foo_printer) + assert f.lookup(C()) is foo_printer + with pytest.raises(KeyError): + f.lookup(A()) + +def test_lookup_string(): + f = PlainTextFormatter() + type_str = '%s.%s' % (C.__module__, 'C') + + f.for_type(type_str, foo_printer) + assert f.lookup(C()) is foo_printer + # should move from deferred to imported dict + assert _mod_name_key(C) not in f.deferred_printers + assert C in f.type_printers + +def test_lookup_by_type(): + f = PlainTextFormatter() + f.for_type(C, foo_printer) + assert f.lookup_by_type(C) is foo_printer + with pytest.raises(KeyError): + f.lookup_by_type(A) + +def test_lookup_by_type_string(): + f = PlainTextFormatter() + type_str = '%s.%s' % (C.__module__, 'C') + f.for_type(type_str, foo_printer) + + # verify insertion + assert _mod_name_key(C) in f.deferred_printers + assert C not in f.type_printers + + assert f.lookup_by_type(type_str) is foo_printer + # lookup by string doesn't cause import + assert _mod_name_key(C) in f.deferred_printers + assert C not in f.type_printers + + assert f.lookup_by_type(C) is foo_printer + # should move from deferred to imported dict + assert _mod_name_key(C) not in f.deferred_printers + assert C in f.type_printers + +def test_in_formatter(): + f = PlainTextFormatter() + f.for_type(C, foo_printer) + type_str = '%s.%s' % 
(C.__module__, 'C') + assert C in f + assert type_str in f + +def test_string_in_formatter(): + f = PlainTextFormatter() + type_str = '%s.%s' % (C.__module__, 'C') + f.for_type(type_str, foo_printer) + assert type_str in f + assert C in f + +def test_pop(): + f = PlainTextFormatter() + f.for_type(C, foo_printer) + assert f.lookup_by_type(C) is foo_printer + assert f.pop(C, None) is foo_printer + f.for_type(C, foo_printer) + assert f.pop(C) is foo_printer + with pytest.raises(KeyError): + f.lookup_by_type(C) + with pytest.raises(KeyError): + f.pop(C) + with pytest.raises(KeyError): + f.pop(A) + assert f.pop(A, None) is None + +def test_pop_string(): + f = PlainTextFormatter() + type_str = '%s.%s' % (C.__module__, 'C') + + with pytest.raises(KeyError): + f.pop(type_str) + + f.for_type(type_str, foo_printer) + f.pop(type_str) + with pytest.raises(KeyError): + f.lookup_by_type(C) + with pytest.raises(KeyError): + f.pop(type_str) + + f.for_type(C, foo_printer) + assert f.pop(type_str, None) is foo_printer + with pytest.raises(KeyError): + f.lookup_by_type(C) + with pytest.raises(KeyError): + f.pop(type_str) + assert f.pop(type_str, None) is None + + +def test_error_method(): + f = HTMLFormatter() + class BadHTML(object): + def _repr_html_(self): + raise ValueError("Bad HTML") + bad = BadHTML() + with capture_output() as captured: + result = f(bad) + assert result is None + assert "Traceback" in captured.stdout + assert "Bad HTML" in captured.stdout + assert "_repr_html_" in captured.stdout + +def test_nowarn_notimplemented(): + f = HTMLFormatter() + class HTMLNotImplemented(object): + def _repr_html_(self): + raise NotImplementedError + h = HTMLNotImplemented() + with capture_output() as captured: + result = f(h) + assert result is None + assert "" == captured.stderr + assert "" == captured.stdout + + +def test_warn_error_for_type(): + f = HTMLFormatter() + f.for_type(int, lambda i: name_error) + with capture_output() as captured: + result = f(5) + assert result is None + assert "Traceback" in captured.stdout + assert "NameError" in captured.stdout + assert "name_error" in captured.stdout + +def test_error_pretty_method(): + f = PlainTextFormatter() + class BadPretty(object): + def _repr_pretty_(self): + return "hello" + bad = BadPretty() + with capture_output() as captured: + result = f(bad) + assert result is None + assert "Traceback" in captured.stdout + assert "_repr_pretty_" in captured.stdout + assert "given" in captured.stdout + assert "argument" in captured.stdout + + +def test_bad_repr_traceback(): + f = PlainTextFormatter() + bad = BadRepr() + with capture_output() as captured: + result = f(bad) + # catches error, returns None + assert result is None + assert "Traceback" in captured.stdout + assert "__repr__" in captured.stdout + assert "ValueError" in captured.stdout + + +class MakePDF(object): + def _repr_pdf_(self): + return 'PDF' + +def test_pdf_formatter(): + pdf = MakePDF() + f = PDFFormatter() + assert f(pdf) == "PDF" + + +def test_print_method_bound(): + f = HTMLFormatter() + class MyHTML(object): + def _repr_html_(self): + return "hello" + with capture_output() as captured: + result = f(MyHTML) + assert result is None + assert "FormatterWarning" not in captured.stderr + + with capture_output() as captured: + result = f(MyHTML()) + assert result == "hello" + assert captured.stderr == "" + + +def test_print_method_weird(): + + class TextMagicHat(object): + def __getattr__(self, key): + return key + + f = HTMLFormatter() + + text_hat = TextMagicHat() + assert 
text_hat._repr_html_ == "_repr_html_" + with capture_output() as captured: + result = f(text_hat) + + assert result is None + assert "FormatterWarning" not in captured.stderr + + class CallableMagicHat(object): + def __getattr__(self, key): + return lambda : key + + call_hat = CallableMagicHat() + with capture_output() as captured: + result = f(call_hat) + + assert result is None + + class BadReprArgs(object): + def _repr_html_(self, extra, args): + return "html" + + bad = BadReprArgs() + with capture_output() as captured: + result = f(bad) + + assert result is None + assert "FormatterWarning" not in captured.stderr + + +def test_format_config(): + """config objects don't pretend to support fancy reprs with lazy attrs""" + f = HTMLFormatter() + cfg = Config() + with capture_output() as captured: + result = f(cfg) + assert result is None + assert captured.stderr == "" + + with capture_output() as captured: + result = f(Config) + assert result is None + assert captured.stderr == "" + + +def test_pretty_max_seq_length(): + f = PlainTextFormatter(max_seq_length=1) + lis = list(range(3)) + text = f(lis) + assert text == "[0, ...]" + f.max_seq_length = 0 + text = f(lis) + assert text == "[0, 1, 2]" + text = f(list(range(1024))) + lines = text.splitlines() + assert len(lines) == 1024 + + +def test_ipython_display_formatter(): + """Objects with _ipython_display_ defined bypass other formatters""" + f = get_ipython().display_formatter + catcher = [] + class SelfDisplaying(object): + def _ipython_display_(self): + catcher.append(self) + + class NotSelfDisplaying(object): + def __repr__(self): + return "NotSelfDisplaying" + + def _ipython_display_(self): + raise NotImplementedError + + save_enabled = f.ipython_display_formatter.enabled + f.ipython_display_formatter.enabled = True + + yes = SelfDisplaying() + no = NotSelfDisplaying() + + d, md = f.format(no) + assert d == {"text/plain": repr(no)} + assert md == {} + assert catcher == [] + + d, md = f.format(yes) + assert d == {} + assert md == {} + assert catcher == [yes] + + f.ipython_display_formatter.enabled = save_enabled + + +def test_repr_mime(): + class HasReprMime(object): + def _repr_mimebundle_(self, include=None, exclude=None): + return { + 'application/json+test.v2': { + 'x': 'y' + }, + 'plain/text' : '', + 'image/png' : 'i-overwrite' + } + + def _repr_png_(self): + return 'should-be-overwritten' + def _repr_html_(self): + return 'hi!' 
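+
+    # Overlapping keys from _repr_mimebundle_ (image/png above) take
+    # precedence over the dedicated _repr_*_ methods; the asserts below
+    # exercise exactly that.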
+ + f = get_ipython().display_formatter + html_f = f.formatters['text/html'] + save_enabled = html_f.enabled + html_f.enabled = True + obj = HasReprMime() + d, md = f.format(obj) + html_f.enabled = save_enabled + + assert sorted(d) == [ + "application/json+test.v2", + "image/png", + "plain/text", + "text/html", + "text/plain", + ] + assert md == {} + + d, md = f.format(obj, include={"image/png"}) + assert list(d.keys()) == [ + "image/png" + ], "Include should filter out even things from repr_mimebundle" + + assert d["image/png"] == "i-overwrite", "_repr_mimebundle_ take precedence" + + +def test_pass_correct_include_exclude(): + class Tester(object): + + def __init__(self, include=None, exclude=None): + self.include = include + self.exclude = exclude + + def _repr_mimebundle_(self, include, exclude, **kwargs): + if include and (include != self.include): + raise ValueError('include got modified: display() may be broken.') + if exclude and (exclude != self.exclude): + raise ValueError('exclude got modified: display() may be broken.') + + return None + + include = {'a', 'b', 'c'} + exclude = {'c', 'e' , 'f'} + + f = get_ipython().display_formatter + f.format(Tester(include=include, exclude=exclude), include=include, exclude=exclude) + f.format(Tester(exclude=exclude), exclude=exclude) + f.format(Tester(include=include), include=include) + + +def test_repr_mime_meta(): + class HasReprMimeMeta(object): + def _repr_mimebundle_(self, include=None, exclude=None): + data = { + 'image/png': 'base64-image-data', + } + metadata = { + 'image/png': { + 'width': 5, + 'height': 10, + } + } + return (data, metadata) + + f = get_ipython().display_formatter + obj = HasReprMimeMeta() + d, md = f.format(obj) + assert sorted(d) == ["image/png", "text/plain"] + assert md == { + "image/png": { + "width": 5, + "height": 10, + } + } + + +def test_repr_mime_failure(): + class BadReprMime(object): + def _repr_mimebundle_(self, include=None, exclude=None): + raise RuntimeError + + f = get_ipython().display_formatter + obj = BadReprMime() + d, md = f.format(obj) + assert "text/plain" in d diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_guarded_eval.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_guarded_eval.py new file mode 100644 index 000000000..905cf3ab8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_guarded_eval.py @@ -0,0 +1,570 @@ +from contextlib import contextmanager +from typing import NamedTuple +from functools import partial +from IPython.core.guarded_eval import ( + EvaluationContext, + GuardRejection, + guarded_eval, + _unbind_method, +) +from IPython.testing import decorators as dec +import pytest + + +def create_context(evaluation: str, **kwargs): + return EvaluationContext(locals=kwargs, globals={}, evaluation=evaluation) + + +forbidden = partial(create_context, "forbidden") +minimal = partial(create_context, "minimal") +limited = partial(create_context, "limited") +unsafe = partial(create_context, "unsafe") +dangerous = partial(create_context, "dangerous") + +LIMITED_OR_HIGHER = [limited, unsafe, dangerous] +MINIMAL_OR_HIGHER = [minimal, *LIMITED_OR_HIGHER] + + +@contextmanager +def module_not_installed(module: str): + import sys + + try: + to_restore = sys.modules[module] + del sys.modules[module] + except KeyError: + to_restore = None + try: + yield + finally: + sys.modules[module] = to_restore + + +def test_external_not_installed(): + """ + Because attribute check requires checking if object is not of allowed + external type, 
this tests logic for absence of external module. + """ + + class Custom: + def __init__(self): + self.test = 1 + + def __getattr__(self, key): + return key + + with module_not_installed("pandas"): + context = limited(x=Custom()) + with pytest.raises(GuardRejection): + guarded_eval("x.test", context) + + +@dec.skip_without("pandas") +def test_external_changed_api(monkeypatch): + """Check that the execution rejects if external API changed paths""" + import pandas as pd + + series = pd.Series([1], index=["a"]) + + with monkeypatch.context() as m: + m.delattr(pd, "Series") + context = limited(data=series) + with pytest.raises(GuardRejection): + guarded_eval("data.iloc[0]", context) + + +@dec.skip_without("pandas") +def test_pandas_series_iloc(): + import pandas as pd + + series = pd.Series([1], index=["a"]) + context = limited(data=series) + assert guarded_eval("data.iloc[0]", context) == 1 + + +def test_rejects_custom_properties(): + class BadProperty: + @property + def iloc(self): + return [None] + + series = BadProperty() + context = limited(data=series) + + with pytest.raises(GuardRejection): + guarded_eval("data.iloc[0]", context) + + +@dec.skip_without("pandas") +def test_accepts_non_overriden_properties(): + import pandas as pd + + class GoodProperty(pd.Series): + pass + + series = GoodProperty([1], index=["a"]) + context = limited(data=series) + + assert guarded_eval("data.iloc[0]", context) == 1 + + +@dec.skip_without("pandas") +def test_pandas_series(): + import pandas as pd + + context = limited(data=pd.Series([1], index=["a"])) + assert guarded_eval('data["a"]', context) == 1 + with pytest.raises(KeyError): + guarded_eval('data["c"]', context) + + +@dec.skip_without("pandas") +def test_pandas_bad_series(): + import pandas as pd + + class BadItemSeries(pd.Series): + def __getitem__(self, key): + return "CUSTOM_ITEM" + + class BadAttrSeries(pd.Series): + def __getattr__(self, key): + return "CUSTOM_ATTR" + + bad_series = BadItemSeries([1], index=["a"]) + context = limited(data=bad_series) + + with pytest.raises(GuardRejection): + guarded_eval('data["a"]', context) + with pytest.raises(GuardRejection): + guarded_eval('data["c"]', context) + + # note: here result is a bit unexpected because + # pandas `__getattr__` calls `__getitem__`; + # FIXME - special case to handle it? 
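+    # (pandas Series attribute access falls back to __getitem__ for index
+    # labels, which is why the overridden __getitem__ is what runs when
+    # evaluating `data.a` here.)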
+ assert guarded_eval("data.a", context) == "CUSTOM_ITEM" + + context = unsafe(data=bad_series) + assert guarded_eval('data["a"]', context) == "CUSTOM_ITEM" + + bad_attr_series = BadAttrSeries([1], index=["a"]) + context = limited(data=bad_attr_series) + assert guarded_eval('data["a"]', context) == 1 + with pytest.raises(GuardRejection): + guarded_eval("data.a", context) + + +@dec.skip_without("pandas") +def test_pandas_dataframe_loc(): + import pandas as pd + from pandas.testing import assert_series_equal + + data = pd.DataFrame([{"a": 1}]) + context = limited(data=data) + assert_series_equal(guarded_eval('data.loc[:, "a"]', context), data["a"]) + + +def test_named_tuple(): + class GoodNamedTuple(NamedTuple): + a: str + pass + + class BadNamedTuple(NamedTuple): + a: str + + def __getitem__(self, key): + return None + + good = GoodNamedTuple(a="x") + bad = BadNamedTuple(a="x") + + context = limited(data=good) + assert guarded_eval("data[0]", context) == "x" + + context = limited(data=bad) + with pytest.raises(GuardRejection): + guarded_eval("data[0]", context) + + +def test_dict(): + context = limited(data={"a": 1, "b": {"x": 2}, ("x", "y"): 3}) + assert guarded_eval('data["a"]', context) == 1 + assert guarded_eval('data["b"]', context) == {"x": 2} + assert guarded_eval('data["b"]["x"]', context) == 2 + assert guarded_eval('data["x", "y"]', context) == 3 + + assert guarded_eval("data.keys", context) + + +def test_set(): + context = limited(data={"a", "b"}) + assert guarded_eval("data.difference", context) + + +def test_list(): + context = limited(data=[1, 2, 3]) + assert guarded_eval("data[1]", context) == 2 + assert guarded_eval("data.copy", context) + + +def test_dict_literal(): + context = limited() + assert guarded_eval("{}", context) == {} + assert guarded_eval('{"a": 1}', context) == {"a": 1} + + +def test_list_literal(): + context = limited() + assert guarded_eval("[]", context) == [] + assert guarded_eval('[1, "a"]', context) == [1, "a"] + + +def test_set_literal(): + context = limited() + assert guarded_eval("set()", context) == set() + assert guarded_eval('{"a"}', context) == {"a"} + + +def test_evaluates_if_expression(): + context = limited() + assert guarded_eval("2 if True else 3", context) == 2 + assert guarded_eval("4 if False else 5", context) == 5 + + +def test_object(): + obj = object() + context = limited(obj=obj) + assert guarded_eval("obj.__dir__", context) == obj.__dir__ + + +@pytest.mark.parametrize( + "code,expected", + [ + ["int.numerator", int.numerator], + ["float.is_integer", float.is_integer], + ["complex.real", complex.real], + ], +) +def test_number_attributes(code, expected): + assert guarded_eval(code, limited()) == expected + + +def test_method_descriptor(): + context = limited() + assert guarded_eval("list.copy.__name__", context) == "copy" + + +@pytest.mark.parametrize( + "data,good,bad,expected", + [ + [[1, 2, 3], "data.index(2)", "data.append(4)", 1], + [{"a": 1}, "data.keys().isdisjoint({})", "data.update()", True], + ], +) +def test_evaluates_calls(data, good, bad, expected): + context = limited(data=data) + assert guarded_eval(good, context) == expected + + with pytest.raises(GuardRejection): + guarded_eval(bad, context) + + +@pytest.mark.parametrize( + "code,expected", + [ + ["(1\n+\n1)", 2], + ["list(range(10))[-1:]", [9]], + ["list(range(20))[3:-2:3]", [3, 6, 9, 12, 15]], + ], +) +@pytest.mark.parametrize("context", LIMITED_OR_HIGHER) +def test_evaluates_complex_cases(code, expected, context): + assert guarded_eval(code, context()) == expected + 
+ +@pytest.mark.parametrize( + "code,expected", + [ + ["1", 1], + ["1.0", 1.0], + ["0xdeedbeef", 0xDEEDBEEF], + ["True", True], + ["None", None], + ["{}", {}], + ["[]", []], + ], +) +@pytest.mark.parametrize("context", MINIMAL_OR_HIGHER) +def test_evaluates_literals(code, expected, context): + assert guarded_eval(code, context()) == expected + + +@pytest.mark.parametrize( + "code,expected", + [ + ["-5", -5], + ["+5", +5], + ["~5", -6], + ], +) +@pytest.mark.parametrize("context", LIMITED_OR_HIGHER) +def test_evaluates_unary_operations(code, expected, context): + assert guarded_eval(code, context()) == expected + + +@pytest.mark.parametrize( + "code,expected", + [ + ["1 + 1", 2], + ["3 - 1", 2], + ["2 * 3", 6], + ["5 // 2", 2], + ["5 / 2", 2.5], + ["5**2", 25], + ["2 >> 1", 1], + ["2 << 1", 4], + ["1 | 2", 3], + ["1 & 1", 1], + ["1 & 2", 0], + ], +) +@pytest.mark.parametrize("context", LIMITED_OR_HIGHER) +def test_evaluates_binary_operations(code, expected, context): + assert guarded_eval(code, context()) == expected + + +@pytest.mark.parametrize( + "code,expected", + [ + ["2 > 1", True], + ["2 < 1", False], + ["2 <= 1", False], + ["2 <= 2", True], + ["1 >= 2", False], + ["2 >= 2", True], + ["2 == 2", True], + ["1 == 2", False], + ["1 != 2", True], + ["1 != 1", False], + ["1 < 4 < 3", False], + ["(1 < 4) < 3", True], + ["4 > 3 > 2 > 1", True], + ["4 > 3 > 2 > 9", False], + ["1 < 2 < 3 < 4", True], + ["9 < 2 < 3 < 4", False], + ["1 < 2 > 1 > 0 > -1 < 1", True], + ["1 in [1] in [[1]]", True], + ["1 in [1] in [[2]]", False], + ["1 in [1]", True], + ["0 in [1]", False], + ["1 not in [1]", False], + ["0 not in [1]", True], + ["True is True", True], + ["False is False", True], + ["True is False", False], + ["True is not True", False], + ["False is not True", True], + ], +) +@pytest.mark.parametrize("context", LIMITED_OR_HIGHER) +def test_evaluates_comparisons(code, expected, context): + assert guarded_eval(code, context()) == expected + + +def test_guards_comparisons(): + class GoodEq(int): + pass + + class BadEq(int): + def __eq__(self, other): + assert False + + context = limited(bad=BadEq(1), good=GoodEq(1)) + + with pytest.raises(GuardRejection): + guarded_eval("bad == 1", context) + + with pytest.raises(GuardRejection): + guarded_eval("bad != 1", context) + + with pytest.raises(GuardRejection): + guarded_eval("1 == bad", context) + + with pytest.raises(GuardRejection): + guarded_eval("1 != bad", context) + + assert guarded_eval("good == 1", context) is True + assert guarded_eval("good != 1", context) is False + assert guarded_eval("1 == good", context) is True + assert guarded_eval("1 != good", context) is False + + +def test_guards_unary_operations(): + class GoodOp(int): + pass + + class BadOpInv(int): + def __inv__(self, other): + assert False + + class BadOpInverse(int): + def __inv__(self, other): + assert False + + context = limited(good=GoodOp(1), bad1=BadOpInv(1), bad2=BadOpInverse(1)) + + with pytest.raises(GuardRejection): + guarded_eval("~bad1", context) + + with pytest.raises(GuardRejection): + guarded_eval("~bad2", context) + + +def test_guards_binary_operations(): + class GoodOp(int): + pass + + class BadOp(int): + def __add__(self, other): + assert False + + context = limited(good=GoodOp(1), bad=BadOp(1)) + + with pytest.raises(GuardRejection): + guarded_eval("1 + bad", context) + + with pytest.raises(GuardRejection): + guarded_eval("bad + 1", context) + + assert guarded_eval("good + 1", context) == 2 + assert guarded_eval("1 + good", context) == 2 + + +def 
test_guards_attributes():
+    class GoodAttr(float):
+        pass
+
+    class BadAttr1(float):
+        def __getattr__(self, key):
+            assert False
+
+    class BadAttr2(float):
+        def __getattribute__(self, key):
+            assert False
+
+    context = limited(good=GoodAttr(0.5), bad1=BadAttr1(0.5), bad2=BadAttr2(0.5))
+
+    with pytest.raises(GuardRejection):
+        guarded_eval("bad1.as_integer_ratio", context)
+
+    with pytest.raises(GuardRejection):
+        guarded_eval("bad2.as_integer_ratio", context)
+
+    assert guarded_eval("good.as_integer_ratio()", context) == (1, 2)
+
+
+@pytest.mark.parametrize("context", MINIMAL_OR_HIGHER)
+def test_access_builtins(context):
+    assert guarded_eval("round", context()) == round
+
+
+def test_access_builtins_fails():
+    context = limited()
+    with pytest.raises(NameError):
+        guarded_eval("this_is_not_builtin", context)
+
+
+def test_rejects_forbidden():
+    context = forbidden()
+    with pytest.raises(GuardRejection):
+        guarded_eval("1", context)
+
+
+def test_guards_locals_and_globals():
+    context = EvaluationContext(
+        locals={"local_a": "a"}, globals={"global_b": "b"}, evaluation="minimal"
+    )
+
+    with pytest.raises(GuardRejection):
+        guarded_eval("local_a", context)
+
+    with pytest.raises(GuardRejection):
+        guarded_eval("global_b", context)
+
+
+def test_access_locals_and_globals():
+    context = EvaluationContext(
+        locals={"local_a": "a"}, globals={"global_b": "b"}, evaluation="limited"
+    )
+    assert guarded_eval("local_a", context) == "a"
+    assert guarded_eval("global_b", context) == "b"
+
+
+@pytest.mark.parametrize(
+    "code",
+    ["def func(): pass", "class C: pass", "x = 1", "x += 1", "del x", "import ast"],
+)
+@pytest.mark.parametrize("context", [minimal(), limited(), unsafe()])
+def test_rejects_side_effect_syntax(code, context):
+    with pytest.raises(SyntaxError):
+        guarded_eval(code, context)
+
+
+def test_subscript():
+    context = EvaluationContext(
+        locals={}, globals={}, evaluation="limited", in_subscript=True
+    )
+    empty_slice = slice(None, None, None)
+    assert guarded_eval("", context) == tuple()
+    assert guarded_eval(":", context) == empty_slice
+    assert guarded_eval("1:2:3", context) == slice(1, 2, 3)
+    assert guarded_eval(':, "a"', context) == (empty_slice, "a")
+
+
+def test_unbind_method():
+    class X(list):
+        def index(self, k):
+            return "CUSTOM"
+
+    x = X()
+    assert _unbind_method(x.index) is X.index
+    assert _unbind_method([].index) is list.index
+    assert _unbind_method(list.index) is None
+
+
+def test_assumption_instance_attr_do_not_matter():
+    """This is semi-specified in Python documentation.
+
+    However, since the specification says 'not guaranteed
+    to work' rather than 'is forbidden to work', future
+    versions could invalidate this assumption. This test
+    is meant to catch such a change if it ever comes true.
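+
+    (CPython looks up special ("dunder") methods on the type, not the
+    instance, which is why the instance-level assignments below are
+    ignored.)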
+    """
+
+    class T:
+        def __getitem__(self, k):
+            return "a"
+
+        def __getattr__(self, k):
+            return "a"
+
+    def f(self):
+        return "b"
+
+    t = T()
+    t.__getitem__ = f
+    t.__getattr__ = f
+    # Instance-level overrides must be ignored: both item and attribute
+    # access should still resolve through the class-level dunders.
+    assert t[1] == "a"
+    assert t.a == "a"
+
+
+def test_assumption_named_tuples_share_getitem():
+    """Check assumption on named tuples sharing __getitem__"""
+    from typing import NamedTuple
+
+    class A(NamedTuple):
+        pass
+
+    class B(NamedTuple):
+        pass
+
+    assert A.__getitem__ == B.__getitem__
diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_handlers.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_handlers.py
new file mode 100644
index 000000000..604dadee1
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_handlers.py
@@ -0,0 +1,97 @@
+"""Tests for input handlers.
+"""
+#-----------------------------------------------------------------------------
+# Module imports
+#-----------------------------------------------------------------------------
+
+# our own packages
+from IPython.core import autocall
+from IPython.testing import tools as tt
+
+#-----------------------------------------------------------------------------
+# Globals
+#-----------------------------------------------------------------------------
+
+# Get the public instance of IPython
+
+failures = []
+num_tests = 0
+
+#-----------------------------------------------------------------------------
+# Test functions
+#-----------------------------------------------------------------------------
+
+class CallableIndexable(object):
+    def __getitem__(self, idx): return True
+    def __call__(self, *args, **kws): return True
+
+
+class Autocallable(autocall.IPyAutocall):
+    def __call__(self):
+        return "called"
+
+
+def run(tests):
+    """Loop through a list of (pre, post) inputs, where pre is the string
+    handed to ipython, and post is how that string looks after it's been
+    transformed (i.e. ipython's notion of _i)"""
+    tt.check_pairs(ip.prefilter_manager.prefilter_lines, tests)
+
+
+def test_handlers():
+    call_idx = CallableIndexable()
+    ip.user_ns['call_idx'] = call_idx
+
+    # For many of the below, we're also checking that leading whitespace
+    # turns off the esc char, which it should unless there is a continuation
+    # line.
+    run(
+        [('"no change"', '"no change"'),  # normal
+         (u"lsmagic", "get_ipython().run_line_magic('lsmagic', '')"),  # magic
+         #("a = b # PYTHON-MODE", '_i'),          # emacs -- avoids _in cache
+         ])
+
+    # Objects which are instances of IPyAutocall are *always* autocalled
+    autocallable = Autocallable()
+    ip.user_ns['autocallable'] = autocallable
+
+    # auto
+    ip.run_line_magic("autocall", "0")
+    # Only explicit escapes or instances of IPyAutocallable should get
+    # expanded
+    run(
+        [
+            ('len "abc"', 'len "abc"'),
+            ("autocallable", "autocallable()"),
+            # Don't add extra brackets (gh-1117)
+            ("autocallable()", "autocallable()"),
+        ]
+    )
+    ip.run_line_magic("autocall", "1")
+    run(
+        [
+            ('len "abc"', 'len("abc")'),
+            ('len "abc";', 'len("abc");'),  # ; is special -- moves out of parens
+            # Autocall is turned off if first arg is [] and the object
+            # is both callable and indexable.  Like so:
+            ("len [1,2]", "len([1,2])"),  # len doesn't support __getitem__...
+            ("call_idx [1]", "call_idx [1]"),  # call_idx *does*..
+            ("call_idx 1", "call_idx(1)"),
+            ("len", "len"),  # only at 2 does it auto-call on single args
+        ]
+    )
+    ip.run_line_magic("autocall", "2")
+    run(
+        [
+            ('len "abc"', 'len("abc")'),
+            ('len "abc";', 'len("abc");'),
+            ("len [1,2]", "len([1,2])"),
+            ("call_idx [1]", "call_idx [1]"),
+            ("call_idx 1", "call_idx(1)"),
+            # This is what's different:
+            ("len", "len()"),  # only at 2 does it auto-call on single args
+        ]
+    )
+    ip.run_line_magic("autocall", "1")
+
+    assert failures == []
diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_history.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_history.py
new file mode 100644
index 000000000..fa64fe04d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_history.py
@@ -0,0 +1,306 @@
+# coding: utf-8
+"""Tests for the IPython history machinery.
+"""
+#-----------------------------------------------------------------------------
+# Module imports
+#-----------------------------------------------------------------------------
+
+# stdlib
+import io
+import sys
+import tempfile
+from datetime import datetime
+from pathlib import Path
+
+from tempfile import TemporaryDirectory
+# our own packages
+from traitlets.config.loader import Config
+
+from IPython.core.history import HistoryAccessor, HistoryManager, extract_hist_ranges
+
+
+def test_proper_default_encoding():
+    assert sys.getdefaultencoding() == "utf-8"
+
+def test_history():
+    ip = get_ipython()
+    with TemporaryDirectory() as tmpdir:
+        tmp_path = Path(tmpdir)
+        hist_manager_ori = ip.history_manager
+        hist_file = tmp_path / "history.sqlite"
+        try:
+            ip.history_manager = HistoryManager(shell=ip, hist_file=hist_file)
+            hist = ["a=1", "def f():\n    test = 1\n    return test", "b='€Æ¾÷ß'"]
+            for i, h in enumerate(hist, start=1):
+                ip.history_manager.store_inputs(i, h)
+
+            ip.history_manager.db_log_output = True
+            # Doesn't match the input, but we'll just check it's stored.
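+            # output_hist_reprs maps an input line number to the repr of its
+            # output; seeding entry 3 by hand gives the output-aware queries
+            # below something to retrieve without executing any code.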
+ ip.history_manager.output_hist_reprs[3] = "spam" + ip.history_manager.store_output(3) + + assert ip.history_manager.input_hist_raw == [""] + hist + + # Detailed tests for _get_range_session + grs = ip.history_manager._get_range_session + assert list(grs(start=2, stop=-1)) == list(zip([0], [2], hist[1:-1])) + assert list(grs(start=-2)) == list(zip([0, 0], [2, 3], hist[-2:])) + assert list(grs(output=True)) == list( + zip([0, 0, 0], [1, 2, 3], zip(hist, [None, None, "spam"])) + ) + + # Check whether specifying a range beyond the end of the current + # session results in an error (gh-804) + ip.run_line_magic("hist", "2-500") + + # Check that we can write non-ascii characters to a file + ip.run_line_magic("hist", "-f %s" % (tmp_path / "test1")) + ip.run_line_magic("hist", "-pf %s" % (tmp_path / "test2")) + ip.run_line_magic("hist", "-nf %s" % (tmp_path / "test3")) + ip.run_line_magic("save", "%s 1-10" % (tmp_path / "test4")) + + # New session + ip.history_manager.reset() + newcmds = ["z=5", "class X(object):\n pass", "k='p'", "z=5"] + for i, cmd in enumerate(newcmds, start=1): + ip.history_manager.store_inputs(i, cmd) + gothist = ip.history_manager.get_range(start=1, stop=4) + assert list(gothist) == list(zip([0, 0, 0], [1, 2, 3], newcmds)) + # Previous session: + gothist = ip.history_manager.get_range(-1, 1, 4) + assert list(gothist) == list(zip([1, 1, 1], [1, 2, 3], hist)) + + newhist = [(2, i, c) for (i, c) in enumerate(newcmds, 1)] + + # Check get_hist_tail + gothist = ip.history_manager.get_tail(5, output=True, + include_latest=True) + expected = [(1, 3, (hist[-1], "spam"))] \ + + [(s, n, (c, None)) for (s, n, c) in newhist] + assert list(gothist) == expected + + gothist = ip.history_manager.get_tail(2) + expected = newhist[-3:-1] + assert list(gothist) == expected + + # Check get_hist_search + + gothist = ip.history_manager.search("*test*") + assert list(gothist) == [(1, 2, hist[1])] + + gothist = ip.history_manager.search("*=*") + assert list(gothist) == [ + (1, 1, hist[0]), + (1, 2, hist[1]), + (1, 3, hist[2]), + newhist[0], + newhist[2], + newhist[3], + ] + + gothist = ip.history_manager.search("*=*", n=4) + assert list(gothist) == [ + (1, 3, hist[2]), + newhist[0], + newhist[2], + newhist[3], + ] + + gothist = ip.history_manager.search("*=*", unique=True) + assert list(gothist) == [ + (1, 1, hist[0]), + (1, 2, hist[1]), + (1, 3, hist[2]), + newhist[2], + newhist[3], + ] + + gothist = ip.history_manager.search("*=*", unique=True, n=3) + assert list(gothist) == [(1, 3, hist[2]), newhist[2], newhist[3]] + + gothist = ip.history_manager.search("b*", output=True) + assert list(gothist) == [(1, 3, (hist[2], "spam"))] + + # Cross testing: check that magic %save can get previous session. 
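+            # The range '~1/1-3' below means lines 1-3 of the session one
+            # before the current one; extract_hist_ranges() (exercised further
+            # down in this file) is what parses this notation.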
+ testfilename = (tmp_path / "test.py").resolve() + ip.run_line_magic("save", str(testfilename) + " ~1/1-3") + with io.open(testfilename, encoding="utf-8") as testfile: + assert testfile.read() == "# coding: utf-8\n" + "\n".join(hist) + "\n" + + # Duplicate line numbers - check that it doesn't crash, and + # gets a new session + ip.history_manager.store_inputs(1, "rogue") + ip.history_manager.writeout_cache() + assert ip.history_manager.session_number == 3 + + # Check that session and line values are not just max values + sessid, lineno, entry = newhist[-1] + assert lineno > 1 + ip.history_manager.reset() + lineno = 1 + ip.history_manager.store_inputs(lineno, entry) + gothist = ip.history_manager.search("*=*", unique=True) + hist = list(gothist)[-1] + assert sessid < hist[0] + assert hist[1:] == (lineno, entry) + finally: + # Ensure saving thread is shut down before we try to clean up the files + ip.history_manager.save_thread.stop() + # Forcibly close database rather than relying on garbage collection + ip.history_manager.db.close() + # Restore history manager + ip.history_manager = hist_manager_ori + + +def test_extract_hist_ranges(): + instr = "1 2/3 ~4/5-6 ~4/7-~4/9 ~9/2-~7/5 ~10/" + expected = [(0, 1, 2), # 0 == current session + (2, 3, 4), + (-4, 5, 7), + (-4, 7, 10), + (-9, 2, None), # None == to end + (-8, 1, None), + (-7, 1, 6), + (-10, 1, None)] + actual = list(extract_hist_ranges(instr)) + assert actual == expected + + +def test_extract_hist_ranges_empty_str(): + instr = "" + expected = [(0, 1, None)] # 0 == current session, None == to end + actual = list(extract_hist_ranges(instr)) + assert actual == expected + + +def test_magic_rerun(): + """Simple test for %rerun (no args -> rerun last line)""" + ip = get_ipython() + ip.run_cell("a = 10", store_history=True) + ip.run_cell("a += 1", store_history=True) + assert ip.user_ns["a"] == 11 + ip.run_cell("%rerun", store_history=True) + assert ip.user_ns["a"] == 12 + +def test_timestamp_type(): + ip = get_ipython() + info = ip.history_manager.get_session_info() + assert isinstance(info[1], datetime) + +def test_hist_file_config(): + cfg = Config() + tfile = tempfile.NamedTemporaryFile(delete=False) + cfg.HistoryManager.hist_file = Path(tfile.name) + try: + hm = HistoryManager(shell=get_ipython(), config=cfg) + assert hm.hist_file == cfg.HistoryManager.hist_file + finally: + try: + Path(tfile.name).unlink() + except OSError: + # same catch as in testing.tools.TempFileMixin + # On Windows, even though we close the file, we still can't + # delete it. 
I have no clue why + pass + +def test_histmanager_disabled(): + """Ensure that disabling the history manager doesn't create a database.""" + cfg = Config() + cfg.HistoryAccessor.enabled = False + + ip = get_ipython() + with TemporaryDirectory() as tmpdir: + hist_manager_ori = ip.history_manager + hist_file = Path(tmpdir) / "history.sqlite" + cfg.HistoryManager.hist_file = hist_file + try: + ip.history_manager = HistoryManager(shell=ip, config=cfg) + hist = ["a=1", "def f():\n test = 1\n return test", "b='€Æ¾÷ß'"] + for i, h in enumerate(hist, start=1): + ip.history_manager.store_inputs(i, h) + assert ip.history_manager.input_hist_raw == [""] + hist + ip.history_manager.reset() + ip.history_manager.end_session() + finally: + ip.history_manager = hist_manager_ori + + # hist_file should not be created + assert hist_file.exists() is False + + +def test_get_tail_session_awareness(): + """Test .get_tail() is: + - session specific in HistoryManager + - session agnostic in HistoryAccessor + same for .get_last_session_id() + """ + ip = get_ipython() + with TemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) + hist_file = tmp_path / "history.sqlite" + get_source = lambda x: x[2] + hm1 = None + hm2 = None + ha = None + try: + # hm1 creates a new session and adds history entries, + # ha catches up + hm1 = HistoryManager(shell=ip, hist_file=hist_file) + hm1_last_sid = hm1.get_last_session_id + ha = HistoryAccessor(hist_file=hist_file) + ha_last_sid = ha.get_last_session_id + + hist1 = ["a=1", "b=1", "c=1"] + for i, h in enumerate(hist1 + [""], start=1): + hm1.store_inputs(i, h) + assert list(map(get_source, hm1.get_tail())) == hist1 + assert list(map(get_source, ha.get_tail())) == hist1 + sid1 = hm1_last_sid() + assert sid1 is not None + assert ha_last_sid() == sid1 + + # hm2 creates a new session and adds entries, + # ha catches up + hm2 = HistoryManager(shell=ip, hist_file=hist_file) + hm2_last_sid = hm2.get_last_session_id + + hist2 = ["a=2", "b=2", "c=2"] + for i, h in enumerate(hist2 + [""], start=1): + hm2.store_inputs(i, h) + tail = hm2.get_tail(n=3) + assert list(map(get_source, tail)) == hist2 + tail = ha.get_tail(n=3) + assert list(map(get_source, tail)) == hist2 + sid2 = hm2_last_sid() + assert sid2 is not None + assert ha_last_sid() == sid2 + assert sid2 != sid1 + + # but hm1 still maintains its point of reference + # and adding more entries to it doesn't change others + # immediate perspective + assert hm1_last_sid() == sid1 + tail = hm1.get_tail(n=3) + assert list(map(get_source, tail)) == hist1 + + hist3 = ["a=3", "b=3", "c=3"] + for i, h in enumerate(hist3 + [""], start=5): + hm1.store_inputs(i, h) + tail = hm1.get_tail(n=7) + assert list(map(get_source, tail)) == hist1 + [""] + hist3 + tail = hm2.get_tail(n=3) + assert list(map(get_source, tail)) == hist2 + tail = ha.get_tail(n=3) + assert list(map(get_source, tail)) == hist2 + assert hm1_last_sid() == sid1 + assert hm2_last_sid() == sid2 + assert ha_last_sid() == sid2 + finally: + if hm1: + hm1.save_thread.stop() + hm1.db.close() + if hm2: + hm2.save_thread.stop() + hm2.db.close() + if ha: + ha.db.close() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_hooks.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_hooks.py new file mode 100644 index 000000000..6e0b1c152 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_hooks.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +"""Tests for CommandChainDispatcher.""" + + 
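+# A CommandChainDispatcher keeps (priority, callable) pairs and tries each in
+# priority order; a hook passes control to the next one by raising TryNext.
+# Roughly, as a simplified sketch (not the actual implementation):
+#
+#     for _priority, hook in sorted(chain):
+#         try:
+#             return hook(*args, **kwargs)
+#         except TryNext:
+#             continue
+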
+#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import pytest +from IPython.core.error import TryNext +from IPython.core.hooks import CommandChainDispatcher + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +# Define two classes, one which succeeds and one which raises TryNext. Each +# sets the attribute `called` to True when it is called. +class Okay(object): + def __init__(self, message): + self.message = message + self.called = False + def __call__(self): + self.called = True + return self.message + +class Fail(object): + def __init__(self, message): + self.message = message + self.called = False + def __call__(self): + self.called = True + raise TryNext(self.message) + +#----------------------------------------------------------------------------- +# Test functions +#----------------------------------------------------------------------------- + +def test_command_chain_dispatcher_ff(): + """Test two failing hooks""" + fail1 = Fail("fail1") + fail2 = Fail("fail2") + dp = CommandChainDispatcher([(0, fail1), (10, fail2)]) + + with pytest.raises(TryNext) as e: + dp() + assert str(e.value) == "fail2" + + assert fail1.called is True + assert fail2.called is True + +def test_command_chain_dispatcher_fofo(): + """Test a mixture of failing and succeeding hooks.""" + fail1 = Fail("fail1") + fail2 = Fail("fail2") + okay1 = Okay("okay1") + okay2 = Okay("okay2") + + dp = CommandChainDispatcher([(0, fail1), + # (5, okay1), # add this later + (10, fail2), + (15, okay2)]) + dp.add(okay1, 5) + + assert dp() == "okay1" + + assert fail1.called is True + assert okay1.called is True + assert fail2.called is False + assert okay2.called is False + +def test_command_chain_dispatcher_eq_priority(): + okay1 = Okay(u'okay1') + okay2 = Okay(u'okay2') + dp = CommandChainDispatcher([(1, okay1)]) + dp.add(okay2, 1) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_imports.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_imports.py new file mode 100644 index 000000000..7aa278fb6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_imports.py @@ -0,0 +1,52 @@ +# encoding: utf-8 + +def test_import_completer(): + from IPython.core import completer + +def test_import_crashhandler(): + from IPython.core import crashhandler + +def test_import_debugger(): + from IPython.core import debugger + +def test_import_excolors(): + from IPython.core import excolors + +def test_import_history(): + from IPython.core import history + +def test_import_hooks(): + from IPython.core import hooks + +def test_import_getipython(): + from IPython.core import getipython + +def test_import_interactiveshell(): + from IPython.core import interactiveshell + +def test_import_logger(): + from IPython.core import logger + +def test_import_macro(): + from IPython.core import macro + +def test_import_magic(): + from IPython.core import magic + +def test_import_oinspect(): + from IPython.core import oinspect + +def test_import_prefilter(): + from IPython.core import prefilter + +def test_import_prompts(): + from IPython.core import prompts + +def test_import_release(): + from IPython.core import release + +def test_import_ultratb(): + from IPython.core import ultratb + +def test_import_usage(): + from IPython.core import usage diff --git 
a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputsplitter.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputsplitter.py new file mode 100644 index 000000000..5a2289577 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputsplitter.py @@ -0,0 +1,642 @@ +# -*- coding: utf-8 -*- +"""Tests for the inputsplitter module.""" + + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import unittest +import pytest +import sys + +with pytest.warns(DeprecationWarning, match="inputsplitter"): + from IPython.core import inputsplitter as isp +from IPython.core.inputtransformer import InputTransformer +from IPython.core.tests.test_inputtransformer import syntax, syntax_ml +from IPython.testing import tools as tt + +#----------------------------------------------------------------------------- +# Semi-complete examples (also used as tests) +#----------------------------------------------------------------------------- + +# Note: at the bottom, there's a slightly more complete version of this that +# can be useful during development of code here. + +def mini_interactive_loop(input_func): + """Minimal example of the logic of an interactive interpreter loop. + + This serves as an example, and it is used by the test system with a fake + raw_input that simulates interactive input.""" + + from IPython.core.inputsplitter import InputSplitter + + isp = InputSplitter() + # In practice, this input loop would be wrapped in an outside loop to read + # input indefinitely, until some exit/quit command was issued. Here we + # only illustrate the basic inner loop. + while isp.push_accepts_more(): + indent = ' '*isp.get_indent_spaces() + prompt = '>>> ' + indent + line = indent + input_func(prompt) + isp.push(line) + + # Here we just return input so we can use it in a test suite, but a real + # interpreter would instead send it for execution somewhere. + src = isp.source_reset() + #print 'Input source was:\n', src # dbg + return src + +#----------------------------------------------------------------------------- +# Test utilities, just for local use +#----------------------------------------------------------------------------- + + +def pseudo_input(lines): + """Return a function that acts like raw_input but feeds the input list.""" + ilines = iter(lines) + def raw_in(prompt): + try: + return next(ilines) + except StopIteration: + return '' + return raw_in + +#----------------------------------------------------------------------------- +# Tests +#----------------------------------------------------------------------------- +def test_spaces(): + tests = [('', 0), + (' ', 1), + ('\n', 0), + (' \n', 1), + ('x', 0), + (' x', 1), + (' x',2), + (' x',4), + # Note: tabs are counted as a single whitespace! + ('\tx', 1), + ('\t x', 2), + ] + tt.check_pairs(isp.num_ini_spaces, tests) + + +def test_remove_comments(): + tests = [('text', 'text'), + ('text # comment', 'text '), + ('text # comment\n', 'text \n'), + ('text # comment \n', 'text \n'), + ('line # c \nline\n','line \nline\n'), + ('line # c \nline#c2 \nline\nline #c\n\n', + 'line \nline\nline\nline \n\n'), + ] + tt.check_pairs(isp.remove_comments, tests) + + +def test_get_input_encoding(): + encoding = isp.get_input_encoding() + assert isinstance(encoding, str) + # simple-minded check that at least encoding a simple string works with the + # encoding we got. 
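+    # (Typically 'utf-8'; any ASCII-compatible codec will round-trip b"test".)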
+ assert "test".encode(encoding) == b"test" + + +class NoInputEncodingTestCase(unittest.TestCase): + def setUp(self): + self.old_stdin = sys.stdin + class X: pass + fake_stdin = X() + sys.stdin = fake_stdin + + def test(self): + # Verify that if sys.stdin has no 'encoding' attribute we do the right + # thing + enc = isp.get_input_encoding() + self.assertEqual(enc, 'ascii') + + def tearDown(self): + sys.stdin = self.old_stdin + + +class InputSplitterTestCase(unittest.TestCase): + def setUp(self): + self.isp = isp.InputSplitter() + + def test_reset(self): + isp = self.isp + isp.push('x=1') + isp.reset() + self.assertEqual(isp._buffer, []) + self.assertEqual(isp.get_indent_spaces(), 0) + self.assertEqual(isp.source, '') + self.assertEqual(isp.code, None) + self.assertEqual(isp._is_complete, False) + + def test_source(self): + self.isp._store('1') + self.isp._store('2') + self.assertEqual(self.isp.source, '1\n2\n') + self.assertEqual(len(self.isp._buffer)>0, True) + self.assertEqual(self.isp.source_reset(), '1\n2\n') + self.assertEqual(self.isp._buffer, []) + self.assertEqual(self.isp.source, '') + + def test_indent(self): + isp = self.isp # shorthand + isp.push('x=1') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n x=1') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('y=2\n') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_indent2(self): + isp = self.isp + isp.push('if 1:') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push(' x=1') + self.assertEqual(isp.get_indent_spaces(), 4) + # Blank lines shouldn't change the indent level + isp.push(' '*2) + self.assertEqual(isp.get_indent_spaces(), 4) + + def test_indent3(self): + isp = self.isp + # When a multiline statement contains parens or multiline strings, we + # shouldn't get confused. 
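+        # The unclosed '(' joins the two pushes into one logical line, so the
+        # indent level should stay at the 4 spaces introduced by 'if 1:'.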
+ isp.push("if 1:") + isp.push(" x = (1+\n 2)") + self.assertEqual(isp.get_indent_spaces(), 4) + + def test_indent4(self): + isp = self.isp + # whitespace after ':' should not screw up indent level + isp.push('if 1: \n x=1') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('y=2\n') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\t\n x=1') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('y=2\n') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_dedent_pass(self): + isp = self.isp # shorthand + # should NOT cause dedent + isp.push('if 1:\n passes = 5') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('if 1:\n pass') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n pass ') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_dedent_break(self): + isp = self.isp # shorthand + # should NOT cause dedent + isp.push('while 1:\n breaks = 5') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('while 1:\n break') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('while 1:\n break ') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_dedent_continue(self): + isp = self.isp # shorthand + # should NOT cause dedent + isp.push('while 1:\n continues = 5') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('while 1:\n continue') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('while 1:\n continue ') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_dedent_raise(self): + isp = self.isp # shorthand + # should NOT cause dedent + isp.push('if 1:\n raised = 4') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('if 1:\n raise TypeError()') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n raise') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n raise ') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_dedent_return(self): + isp = self.isp # shorthand + # should NOT cause dedent + isp.push('if 1:\n returning = 4') + self.assertEqual(isp.get_indent_spaces(), 4) + isp.push('if 1:\n return 5 + 493') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n return') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n return ') + self.assertEqual(isp.get_indent_spaces(), 0) + isp.push('if 1:\n return(0)') + self.assertEqual(isp.get_indent_spaces(), 0) + + def test_push(self): + isp = self.isp + self.assertEqual(isp.push('x=1'), True) + + def test_push2(self): + isp = self.isp + self.assertEqual(isp.push('if 1:'), False) + for line in [' x=1', '# a comment', ' y=2']: + print(line) + self.assertEqual(isp.push(line), True) + + def test_push3(self): + isp = self.isp + isp.push('if True:') + isp.push(' a = 1') + self.assertEqual(isp.push('b = [1,'), False) + + def test_push_accepts_more(self): + isp = self.isp + isp.push('x=1') + self.assertEqual(isp.push_accepts_more(), False) + + def test_push_accepts_more2(self): + isp = self.isp + isp.push('if 1:') + self.assertEqual(isp.push_accepts_more(), True) + isp.push(' x=1') + self.assertEqual(isp.push_accepts_more(), True) + isp.push('') + self.assertEqual(isp.push_accepts_more(), False) + + def test_push_accepts_more3(self): + isp = self.isp + isp.push("x = (2+\n3)") + self.assertEqual(isp.push_accepts_more(), False) + + def test_push_accepts_more4(self): + isp = self.isp + # When a multiline statement contains parens or multiline strings, we + # shouldn't get confused. 
+ # FIXME: we should be able to better handle de-dents in statements like + # multiline strings and multiline expressions (continued with \ or + # parens). Right now we aren't handling the indentation tracking quite + # correctly with this, though in practice it may not be too much of a + # problem. We'll need to see. + isp.push("if 1:") + isp.push(" x = (2+") + isp.push(" 3)") + self.assertEqual(isp.push_accepts_more(), True) + isp.push(" y = 3") + self.assertEqual(isp.push_accepts_more(), True) + isp.push('') + self.assertEqual(isp.push_accepts_more(), False) + + def test_push_accepts_more5(self): + isp = self.isp + isp.push('try:') + isp.push(' a = 5') + isp.push('except:') + isp.push(' raise') + # We want to be able to add an else: block at this point, so it should + # wait for a blank line. + self.assertEqual(isp.push_accepts_more(), True) + + def test_continuation(self): + isp = self.isp + isp.push("import os, \\") + self.assertEqual(isp.push_accepts_more(), True) + isp.push("sys") + self.assertEqual(isp.push_accepts_more(), False) + + def test_syntax_error(self): + isp = self.isp + # Syntax errors immediately produce a 'ready' block, so the invalid + # Python can be sent to the kernel for evaluation with possible ipython + # special-syntax conversion. + isp.push('run foo') + self.assertEqual(isp.push_accepts_more(), False) + + def test_unicode(self): + self.isp.push(u"Pérez") + self.isp.push(u'\xc3\xa9') + self.isp.push(u"u'\xc3\xa9'") + + @pytest.mark.xfail( + reason="Bug in python 3.9.8 – bpo 45738", + condition=sys.version_info in [(3, 9, 8, "final", 0), (3, 11, 0, "alpha", 2)], + raises=SystemError, + strict=True, + ) + def test_line_continuation(self): + """ Test issue #2108.""" + isp = self.isp + # A blank line after a line continuation should not accept more + isp.push("1 \\\n\n") + self.assertEqual(isp.push_accepts_more(), False) + # Whitespace after a \ is a SyntaxError. The only way to test that + # here is to test that push doesn't accept more (as with + # test_syntax_error() above). + isp.push(r"1 \ ") + self.assertEqual(isp.push_accepts_more(), False) + # Even if the line is continuable (c.f. the regular Python + # interpreter) + isp.push(r"(1 \ ") + self.assertEqual(isp.push_accepts_more(), False) + + def test_check_complete(self): + isp = self.isp + self.assertEqual(isp.check_complete("a = 1"), ('complete', None)) + self.assertEqual(isp.check_complete("for a in range(5):"), ('incomplete', 4)) + self.assertEqual(isp.check_complete("raise = 2"), ('invalid', None)) + self.assertEqual(isp.check_complete("a = [1,\n2,"), ('incomplete', 0)) + self.assertEqual(isp.check_complete("def a():\n x=1\n global x"), ('invalid', None)) + +class InteractiveLoopTestCase(unittest.TestCase): + """Tests for an interactive loop like a python shell. + """ + def check_ns(self, lines, ns): + """Validate that the given input lines produce the resulting namespace. + + Note: the input lines are given exactly as they would be typed in an + auto-indenting environment, as mini_interactive_loop above already does + auto-indenting and prepends spaces to the input. + """ + src = mini_interactive_loop(pseudo_input(lines)) + test_ns = {} + exec(src, test_ns) + # We can't check that the provided ns is identical to the test_ns, + # because Python fills test_ns with extra keys (copyright, etc). 
But + # we can check that the given dict is *contained* in test_ns + for k,v in ns.items(): + self.assertEqual(test_ns[k], v) + + def test_simple(self): + self.check_ns(['x=1'], dict(x=1)) + + def test_simple2(self): + self.check_ns(['if 1:', 'x=2'], dict(x=2)) + + def test_xy(self): + self.check_ns(['x=1; y=2'], dict(x=1, y=2)) + + def test_abc(self): + self.check_ns(['if 1:','a=1','b=2','c=3'], dict(a=1, b=2, c=3)) + + def test_multi(self): + self.check_ns(['x =(1+','1+','2)'], dict(x=4)) + + +class IPythonInputTestCase(InputSplitterTestCase): + """By just creating a new class whose .isp is a different instance, we + re-run the same test battery on the new input splitter. + + In addition, this runs the tests over the syntax and syntax_ml dicts that + were tested by individual functions, as part of the OO interface. + + It also makes some checks on the raw buffer storage. + """ + + def setUp(self): + self.isp = isp.IPythonInputSplitter() + + def test_syntax(self): + """Call all single-line syntax tests from the main object""" + isp = self.isp + for example in syntax.values(): + for raw, out_t in example: + if raw.startswith(' '): + continue + + isp.push(raw+'\n') + out_raw = isp.source_raw + out = isp.source_reset() + self.assertEqual(out.rstrip(), out_t, + tt.pair_fail_msg.format("inputsplitter",raw, out_t, out)) + self.assertEqual(out_raw.rstrip(), raw.rstrip()) + + def test_syntax_multiline(self): + isp = self.isp + for example in syntax_ml.values(): + for line_pairs in example: + out_t_parts = [] + raw_parts = [] + for lraw, out_t_part in line_pairs: + if out_t_part is not None: + out_t_parts.append(out_t_part) + + if lraw is not None: + isp.push(lraw) + raw_parts.append(lraw) + + out_raw = isp.source_raw + out = isp.source_reset() + out_t = '\n'.join(out_t_parts).rstrip() + raw = '\n'.join(raw_parts).rstrip() + self.assertEqual(out.rstrip(), out_t) + self.assertEqual(out_raw.rstrip(), raw) + + def test_syntax_multiline_cell(self): + isp = self.isp + for example in syntax_ml.values(): + + out_t_parts = [] + for line_pairs in example: + raw = '\n'.join(r for r, _ in line_pairs if r is not None) + out_t = '\n'.join(t for _,t in line_pairs if t is not None) + out = isp.transform_cell(raw) + # Match ignoring trailing whitespace + self.assertEqual(out.rstrip(), out_t.rstrip()) + + def test_cellmagic_preempt(self): + isp = self.isp + for raw, name, line, cell in [ + ("%%cellm a\nIn[1]:", u'cellm', u'a', u'In[1]:'), + ("%%cellm \nline\n>>> hi", u'cellm', u'', u'line\n>>> hi'), + (">>> %%cellm \nline\n>>> hi", u'cellm', u'', u'line\nhi'), + ("%%cellm \n>>> hi", u'cellm', u'', u'>>> hi'), + ("%%cellm \nline1\nline2", u'cellm', u'', u'line1\nline2'), + ("%%cellm \nline1\\\\\nline2", u'cellm', u'', u'line1\\\\\nline2'), + ]: + expected = "get_ipython().run_cell_magic(%r, %r, %r)" % ( + name, line, cell + ) + out = isp.transform_cell(raw) + self.assertEqual(out.rstrip(), expected.rstrip()) + + def test_multiline_passthrough(self): + isp = self.isp + class CommentTransformer(InputTransformer): + def __init__(self): + self._lines = [] + + def push(self, line): + self._lines.append(line + '#') + + def reset(self): + text = '\n'.join(self._lines) + self._lines = [] + return text + + isp.physical_line_transforms.insert(0, CommentTransformer()) + + for raw, expected in [ + ("a=5", "a=5#"), + ("%ls foo", "get_ipython().run_line_magic(%r, %r)" % (u'ls', u'foo#')), + ("!ls foo\n%ls bar", "get_ipython().system(%r)\nget_ipython().run_line_magic(%r, %r)" % ( + u'ls foo#', u'ls', u'bar#' + )), + 
("1\n2\n3\n%ls foo\n4\n5", "1#\n2#\n3#\nget_ipython().run_line_magic(%r, %r)\n4#\n5#" % (u'ls', u'foo#')), + ]: + out = isp.transform_cell(raw) + self.assertEqual(out.rstrip(), expected.rstrip()) + +#----------------------------------------------------------------------------- +# Main - use as a script, mostly for developer experiments +#----------------------------------------------------------------------------- + +if __name__ == '__main__': + # A simple demo for interactive experimentation. This code will not get + # picked up by any test suite. + from IPython.core.inputsplitter import IPythonInputSplitter + + # configure here the syntax to use, prompt and whether to autoindent + #isp, start_prompt = InputSplitter(), '>>> ' + isp, start_prompt = IPythonInputSplitter(), 'In> ' + + autoindent = True + #autoindent = False + + try: + while True: + prompt = start_prompt + while isp.push_accepts_more(): + indent = ' '*isp.get_indent_spaces() + if autoindent: + line = indent + input(prompt+indent) + else: + line = input(prompt) + isp.push(line) + prompt = '... ' + + # Here we just return input so we can use it in a test suite, but a + # real interpreter would instead send it for execution somewhere. + #src = isp.source; raise EOFError # dbg + raw = isp.source_raw + src = isp.source_reset() + print('Input source was:\n', src) + print('Raw source was:\n', raw) + except EOFError: + print('Bye') + +# Tests for cell magics support + +def test_last_blank(): + assert isp.last_blank("") is False + assert isp.last_blank("abc") is False + assert isp.last_blank("abc\n") is False + assert isp.last_blank("abc\na") is False + + assert isp.last_blank("\n") is True + assert isp.last_blank("\n ") is True + assert isp.last_blank("abc\n ") is True + assert isp.last_blank("abc\n\n") is True + assert isp.last_blank("abc\nd\n\n") is True + assert isp.last_blank("abc\nd\ne\n\n") is True + assert isp.last_blank("abc \n \n \n\n") is True + + +def test_last_two_blanks(): + assert isp.last_two_blanks("") is False + assert isp.last_two_blanks("abc") is False + assert isp.last_two_blanks("abc\n") is False + assert isp.last_two_blanks("abc\n\na") is False + assert isp.last_two_blanks("abc\n \n") is False + assert isp.last_two_blanks("abc\n\n") is False + + assert isp.last_two_blanks("\n\n") is True + assert isp.last_two_blanks("\n\n ") is True + assert isp.last_two_blanks("\n \n") is True + assert isp.last_two_blanks("abc\n\n ") is True + assert isp.last_two_blanks("abc\n\n\n") is True + assert isp.last_two_blanks("abc\n\n \n") is True + assert isp.last_two_blanks("abc\n\n \n ") is True + assert isp.last_two_blanks("abc\n\n \n \n") is True + assert isp.last_two_blanks("abc\nd\n\n\n") is True + assert isp.last_two_blanks("abc\nd\ne\nf\n\n\n") is True + + +class CellMagicsCommon(object): + + def test_whole_cell(self): + src = "%%cellm line\nbody\n" + out = self.sp.transform_cell(src) + ref = "get_ipython().run_cell_magic('cellm', 'line', 'body')\n" + assert out == ref + + def test_cellmagic_help(self): + self.sp.push('%%cellm?') + assert self.sp.push_accepts_more() is False + + def tearDown(self): + self.sp.reset() + + +class CellModeCellMagics(CellMagicsCommon, unittest.TestCase): + sp = isp.IPythonInputSplitter(line_input_checker=False) + + def test_incremental(self): + sp = self.sp + sp.push("%%cellm firstline\n") + assert sp.push_accepts_more() is True # 1 + sp.push("line2\n") + assert sp.push_accepts_more() is True # 2 + sp.push("\n") + # This should accept a blank line and carry on until the cell is reset + assert 
sp.push_accepts_more() is True # 3 + + def test_no_strip_coding(self): + src = '\n'.join([ + '%%writefile foo.py', + '# coding: utf-8', + 'print(u"üñîçø∂é")', + ]) + out = self.sp.transform_cell(src) + assert "# coding: utf-8" in out + + +class LineModeCellMagics(CellMagicsCommon, unittest.TestCase): + sp = isp.IPythonInputSplitter(line_input_checker=True) + + def test_incremental(self): + sp = self.sp + sp.push("%%cellm line2\n") + assert sp.push_accepts_more() is True # 1 + sp.push("\n") + # In this case, a blank line should end the cell magic + assert sp.push_accepts_more() is False # 2 + + +indentation_samples = [ + ('a = 1', 0), + ('for a in b:', 4), + ('def f():', 4), + ('def f(): #comment', 4), + ('a = ":#not a comment"', 0), + ('def f():\n a = 1', 4), + ('def f():\n return 1', 0), + ('for a in b:\n' + ' if a < 0:' + ' continue', 3), + ('a = {', 4), + ('a = {\n' + ' 1,', 5), + ('b = """123', 0), + ('', 0), + ('def f():\n pass', 0), + ('class Bar:\n def f():\n pass', 4), + ('class Bar:\n def f():\n raise', 4), +] + +def test_find_next_indent(): + for code, exp in indentation_samples: + res = isp.find_next_indent(code) + msg = "{!r} != {!r} (expected)\n Code: {!r}".format(res, exp, code) + assert res == exp, msg diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer.py new file mode 100644 index 000000000..bfc936d31 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer.py @@ -0,0 +1,469 @@ +import tokenize + +from IPython.testing import tools as tt + +from IPython.core import inputtransformer as ipt + +def transform_and_reset(transformer): + transformer = transformer() + def transform(inp): + try: + return transformer.push(inp) + finally: + transformer.reset() + + return transform + +# Transformer tests +def transform_checker(tests, transformer, **kwargs): + """Utility to loop over test inputs""" + transformer = transformer(**kwargs) + try: + for inp, tr in tests: + if inp is None: + out = transformer.reset() + else: + out = transformer.push(inp) + assert out == tr + finally: + transformer.reset() + +# Data for all the syntax tests in the form of lists of pairs of +# raw/transformed input. We store it here as a global dict so that we can use +# it both within single-function tests and also to validate the behavior of the +# larger objects + +syntax = \ + dict(assign_system = + [('a =! ls', "a = get_ipython().getoutput('ls')"), + ('b = !ls', "b = get_ipython().getoutput('ls')"), + ('c= !ls', "c = get_ipython().getoutput('ls')"), + ('d == !ls', 'd == !ls'), # Invalid syntax, but we leave == alone. + ('x=1', 'x=1'), # normal input is unmodified + (' ',' '), # blank lines are kept intact + # Tuple unpacking + ("a, b = !echo 'a\\nb'", "a, b = get_ipython().getoutput(\"echo 'a\\\\nb'\")"), + ("a,= !echo 'a'", "a, = get_ipython().getoutput(\"echo 'a'\")"), + ("a, *bc = !echo 'a\\nb\\nc'", "a, *bc = get_ipython().getoutput(\"echo 'a\\\\nb\\\\nc'\")"), + # Tuple unpacking with regular Python expressions, not our syntax. + ("a, b = range(2)", "a, b = range(2)"), + ("a, = range(1)", "a, = range(1)"), + ("a, *bc = range(3)", "a, *bc = range(3)"), + ], + + assign_magic = + [('a =% who', "a = get_ipython().run_line_magic('who', '')"), + ('b = %who', "b = get_ipython().run_line_magic('who', '')"), + ('c= %ls', "c = get_ipython().run_line_magic('ls', '')"), + ('d == %ls', 'd == %ls'), # Invalid syntax, but we leave == alone. 
+ ('x=1', 'x=1'), # normal input is unmodified + (' ',' '), # blank lines are kept intact + ("a, b = %foo", "a, b = get_ipython().run_line_magic('foo', '')"), + ], + classic_prompt=[ + (">>> x=1", "x=1"), + ("x=1", "x=1"), # normal input is unmodified + (" ", " "), # blank lines are kept intact + ], + ipy_prompt=[ + ("In [1]: x=1", "x=1"), + ("x=1", "x=1"), # normal input is unmodified + (" ", " "), # blank lines are kept intact + ], + # Tests for the escape transformer to leave normal code alone + escaped_noesc=[ + (" ", " "), + ("x=1", "x=1"), + ], + # System calls + escaped_shell=[ + ("!ls", "get_ipython().system('ls')"), + # Double-escape shell, this means to capture the output of the + # subprocess and return it + ("!!ls", "get_ipython().getoutput('ls')"), + ], + # Help/object info + escaped_help=[ + ("?", "get_ipython().show_usage()"), + ("?x1", "get_ipython().run_line_magic('pinfo', 'x1')"), + ("??x2", "get_ipython().run_line_magic('pinfo2', 'x2')"), + ("?a.*s", "get_ipython().run_line_magic('psearch', 'a.*s')"), + ("?%hist1", "get_ipython().run_line_magic('pinfo', '%hist1')"), + ("?%%hist2", "get_ipython().run_line_magic('pinfo', '%%hist2')"), + ("?abc = qwe", "get_ipython().run_line_magic('pinfo', 'abc')"), + ], + end_help=[ + ("x3?", "get_ipython().run_line_magic('pinfo', 'x3')"), + ("x4??", "get_ipython().run_line_magic('pinfo2', 'x4')"), + ("%hist1?", "get_ipython().run_line_magic('pinfo', '%hist1')"), + ("%hist2??", "get_ipython().run_line_magic('pinfo2', '%hist2')"), + ("%%hist3?", "get_ipython().run_line_magic('pinfo', '%%hist3')"), + ("%%hist4??", "get_ipython().run_line_magic('pinfo2', '%%hist4')"), + ("π.foo?", "get_ipython().run_line_magic('pinfo', 'π.foo')"), + ("f*?", "get_ipython().run_line_magic('psearch', 'f*')"), + ("ax.*aspe*?", "get_ipython().run_line_magic('psearch', 'ax.*aspe*')"), + ("a = abc?", "get_ipython().run_line_magic('pinfo', 'abc')"), + ("a = abc.qe??", "get_ipython().run_line_magic('pinfo2', 'abc.qe')"), + ("a = *.items?", "get_ipython().run_line_magic('psearch', '*.items')"), + ("plot(a?", "get_ipython().run_line_magic('pinfo', 'a')"), + ("a*2 #comment?", "a*2 #comment?"), + ], + # Explicit magic calls + escaped_magic=[ + ("%cd", "get_ipython().run_line_magic('cd', '')"), + ("%cd /home", "get_ipython().run_line_magic('cd', '/home')"), + # Backslashes need to be escaped. + ("%cd C:\\User", "get_ipython().run_line_magic('cd', 'C:\\\\User')"), + (" %magic", " get_ipython().run_line_magic('magic', '')"), + ], + # Quoting with separate arguments + escaped_quote=[ + (",f", 'f("")'), + (",f x", 'f("x")'), + (" ,f y", ' f("y")'), + (",f a b", 'f("a", "b")'), + ], + # Quoting with single argument + escaped_quote2=[ + (";f", 'f("")'), + (";f x", 'f("x")'), + (" ;f y", ' f("y")'), + (";f a b", 'f("a b")'), + ], + # Simply apply parens + escaped_paren=[ + ("/f", "f()"), + ("/f x", "f(x)"), + (" /f y", " f(y)"), + ("/f a b", "f(a, b)"), + ], + # Check that we transform prompts before other transforms + mixed=[ + ("In [1]: %lsmagic", "get_ipython().run_line_magic('lsmagic', '')"), + (">>> %lsmagic", "get_ipython().run_line_magic('lsmagic', '')"), + ("In [2]: !ls", "get_ipython().system('ls')"), + ("In [3]: abs?", "get_ipython().run_line_magic('pinfo', 'abs')"), + ("In [4]: b = %who", "b = get_ipython().run_line_magic('who', '')"), + ], +) + +# multiline syntax examples. Each of these should be a list of lists, with +# each entry itself having pairs of raw/transformed input. 
The union (with +# '\n'.join() of the transformed inputs is what the splitter should produce +# when fed the raw lines one at a time via push. +syntax_ml = \ + dict(classic_prompt = + [ [('>>> for i in range(10):','for i in range(10):'), + ('... print i',' print i'), + ('... ', ''), + ], + [('>>> a="""','a="""'), + ('... 123"""','123"""'), + ], + [('a="""','a="""'), + ('... 123','123'), + ('... 456"""','456"""'), + ], + [('a="""','a="""'), + ('>>> 123','123'), + ('... 456"""','456"""'), + ], + [('a="""','a="""'), + ('123','123'), + ('... 456"""','... 456"""'), + ], + [('....__class__','....__class__'), + ], + [('a=5', 'a=5'), + ('...', ''), + ], + [('>>> def f(x):', 'def f(x):'), + ('...', ''), + ('... return x', ' return x'), + ], + [('board = """....', 'board = """....'), + ('....', '....'), + ('...."""', '...."""'), + ], + ], + + ipy_prompt = + [ [('In [24]: for i in range(10):','for i in range(10):'), + (' ....: print i',' print i'), + (' ....: ', ''), + ], + [('In [24]: for i in range(10):','for i in range(10):'), + # Qt console prompts expand with spaces, not dots + (' ...: print i',' print i'), + (' ...: ', ''), + ], + [('In [24]: for i in range(10):','for i in range(10):'), + # Sometimes whitespace preceding '...' has been removed + ('...: print i',' print i'), + ('...: ', ''), + ], + [('In [24]: for i in range(10):','for i in range(10):'), + # Space after last continuation prompt has been removed (issue #6674) + ('...: print i',' print i'), + ('...:', ''), + ], + [('In [2]: a="""','a="""'), + (' ...: 123"""','123"""'), + ], + [('a="""','a="""'), + (' ...: 123','123'), + (' ...: 456"""','456"""'), + ], + [('a="""','a="""'), + ('In [1]: 123','123'), + (' ...: 456"""','456"""'), + ], + [('a="""','a="""'), + ('123','123'), + (' ...: 456"""',' ...: 456"""'), + ], + ], + + multiline_datastructure_prompt = + [ [('>>> a = [1,','a = [1,'), + ('... 
2]','2]'), + ], + ], + + multiline_datastructure = + [ [('b = ("%s"', None), + ('# comment', None), + ('%foo )', 'b = ("%s"\n# comment\n%foo )'), + ], + ], + + multiline_string = + [ [("'''foo?", None), + ("bar'''", "'''foo?\nbar'''"), + ], + ], + + leading_indent = + [ [(' print "hi"','print "hi"'), + ], + [(' for a in range(5):','for a in range(5):'), + (' a*2',' a*2'), + ], + [(' a="""','a="""'), + (' 123"""','123"""'), + ], + [('a="""','a="""'), + (' 123"""',' 123"""'), + ], + ], + + cellmagic = + [ [('%%foo a', None), + (None, "get_ipython().run_cell_magic('foo', 'a', '')"), + ], + [('%%bar 123', None), + ('hello', None), + (None , "get_ipython().run_cell_magic('bar', '123', 'hello')"), + ], + [('a=5', 'a=5'), + ('%%cellmagic', '%%cellmagic'), + ], + ], + + escaped = + [ [('%abc def \\', None), + ('ghi', "get_ipython().run_line_magic('abc', 'def ghi')"), + ], + [('%abc def \\', None), + ('ghi\\', None), + (None, "get_ipython().run_line_magic('abc', 'def ghi')"), + ], + ], + + assign_magic = + [ [('a = %bc de \\', None), + ('fg', "a = get_ipython().run_line_magic('bc', 'de fg')"), + ], + [('a = %bc de \\', None), + ('fg\\', None), + (None, "a = get_ipython().run_line_magic('bc', 'de fg')"), + ], + ], + + assign_system = + [ [('a = !bc de \\', None), + ('fg', "a = get_ipython().getoutput('bc de fg')"), + ], + [('a = !bc de \\', None), + ('fg\\', None), + (None, "a = get_ipython().getoutput('bc de fg')"), + ], + ], + ) + + +def test_assign_system(): + tt.check_pairs(transform_and_reset(ipt.assign_from_system), syntax['assign_system']) + +def test_assign_magic(): + tt.check_pairs(transform_and_reset(ipt.assign_from_magic), syntax['assign_magic']) + +def test_classic_prompt(): + tt.check_pairs(transform_and_reset(ipt.classic_prompt), syntax['classic_prompt']) + for example in syntax_ml['classic_prompt']: + transform_checker(example, ipt.classic_prompt) + for example in syntax_ml['multiline_datastructure_prompt']: + transform_checker(example, ipt.classic_prompt) + + # Check that we don't transform the second line if the first is obviously + # IPython syntax + transform_checker([ + ('%foo', '%foo'), + ('>>> bar', '>>> bar'), + ], ipt.classic_prompt) + + +def test_ipy_prompt(): + tt.check_pairs(transform_and_reset(ipt.ipy_prompt), syntax['ipy_prompt']) + for example in syntax_ml['ipy_prompt']: + transform_checker(example, ipt.ipy_prompt) + + # Check that we don't transform the second line if we're inside a cell magic + transform_checker([ + ('%%foo', '%%foo'), + ('In [1]: bar', 'In [1]: bar'), + ], ipt.ipy_prompt) + +def test_assemble_logical_lines(): + tests = \ + [ [("a = \\", None), + ("123", "a = 123"), + ], + [("a = \\", None), # Test resetting when within a multi-line string + ("12 *\\", None), + (None, "a = 12 *"), + ], + [("# foo\\", "# foo\\"), # Comments can't be continued like this + ], + ] + for example in tests: + transform_checker(example, ipt.assemble_logical_lines) + +def test_assemble_python_lines(): + tests = \ + [ [("a = '''", None), + ("abc'''", "a = '''\nabc'''"), + ], + [("a = '''", None), # Test resetting when within a multi-line string + ("def", None), + (None, "a = '''\ndef"), + ], + [("a = [1,", None), + ("2]", "a = [1,\n2]"), + ], + [("a = [1,", None), # Test resetting when within a multi-line string + ("2,", None), + (None, "a = [1,\n2,"), + ], + [("a = '''", None), # Test line continuation within a multi-line string + ("abc\\", None), + ("def", None), + ("'''", "a = '''\nabc\\\ndef\n'''"), + ], + ] + syntax_ml['multiline_datastructure'] + for example in tests: + 
transform_checker(example, ipt.assemble_python_lines) + + +def test_help_end(): + tt.check_pairs(transform_and_reset(ipt.help_end), syntax['end_help']) + +def test_escaped_noesc(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_noesc']) + + +def test_escaped_shell(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_shell']) + + +def test_escaped_help(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_help']) + + +def test_escaped_magic(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_magic']) + + +def test_escaped_quote(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_quote']) + + +def test_escaped_quote2(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_quote2']) + + +def test_escaped_paren(): + tt.check_pairs(transform_and_reset(ipt.escaped_commands), syntax['escaped_paren']) + + +def test_cellmagic(): + for example in syntax_ml['cellmagic']: + transform_checker(example, ipt.cellmagic) + + line_example = [('%%bar 123', None), + ('hello', None), + ('' , "get_ipython().run_cell_magic('bar', '123', 'hello')"), + ] + transform_checker(line_example, ipt.cellmagic, end_on_blank_line=True) + +def test_has_comment(): + tests = [('text', False), + ('text #comment', True), + ('text #comment\n', True), + ('#comment', True), + ('#comment\n', True), + ('a = "#string"', False), + ('a = "#string" # comment', True), + ('a #comment not "string"', True), + ] + tt.check_pairs(ipt.has_comment, tests) + +@ipt.TokenInputTransformer.wrap +def decistmt(tokens): + """Substitute Decimals for floats in a string of statements. + + Based on an example from the tokenize module docs. + """ + result = [] + for toknum, tokval, _, _, _ in tokens: + if toknum == tokenize.NUMBER and '.' in tokval: # replace NUMBER tokens + yield from [ + (tokenize.NAME, 'Decimal'), + (tokenize.OP, '('), + (tokenize.STRING, repr(tokval)), + (tokenize.OP, ')') + ] + else: + yield (toknum, tokval) + + + +def test_token_input_transformer(): + tests = [('1.2', "Decimal ('1.2')"), + ('"1.2"', '"1.2"'), + ] + tt.check_pairs(transform_and_reset(decistmt), tests) + ml_tests = \ + [ [("a = 1.2; b = '''x", None), + ("y'''", "a =Decimal ('1.2');b ='''x\ny'''"), + ], + [("a = [1.2,", None), + ("3]", "a =[Decimal ('1.2'),\n3 ]"), + ], + [("a = '''foo", None), # Test resetting when within a multi-line string + ("bar", None), + (None, "a = '''foo\nbar"), + ], + ] + for example in ml_tests: + transform_checker(example, decistmt) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2.py new file mode 100644 index 000000000..cddb32f71 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2.py @@ -0,0 +1,443 @@ +"""Tests for the token-based transformers in IPython.core.inputtransformer2 + +Line-based transformers are the simpler ones; token-based transformers are +more complex. See test_inputtransformer2_line for tests for line-based +transformations. 
+""" +import platform +import string +import sys +from textwrap import dedent + +import pytest + +from IPython.core import inputtransformer2 as ipt2 +from IPython.core.inputtransformer2 import _find_assign_op, make_tokens_by_line + +MULTILINE_MAGIC = ( + """\ +a = f() +%foo \\ +bar +g() +""".splitlines( + keepends=True + ), + (2, 0), + """\ +a = f() +get_ipython().run_line_magic('foo', ' bar') +g() +""".splitlines( + keepends=True + ), +) + +INDENTED_MAGIC = ( + """\ +for a in range(5): + %ls +""".splitlines( + keepends=True + ), + (2, 4), + """\ +for a in range(5): + get_ipython().run_line_magic('ls', '') +""".splitlines( + keepends=True + ), +) + +CRLF_MAGIC = ( + ["a = f()\n", "%ls\r\n", "g()\n"], + (2, 0), + ["a = f()\n", "get_ipython().run_line_magic('ls', '')\n", "g()\n"], +) + +MULTILINE_MAGIC_ASSIGN = ( + """\ +a = f() +b = %foo \\ + bar +g() +""".splitlines( + keepends=True + ), + (2, 4), + """\ +a = f() +b = get_ipython().run_line_magic('foo', ' bar') +g() +""".splitlines( + keepends=True + ), +) + +MULTILINE_SYSTEM_ASSIGN = ("""\ +a = f() +b = !foo \\ + bar +g() +""".splitlines(keepends=True), (2, 4), """\ +a = f() +b = get_ipython().getoutput('foo bar') +g() +""".splitlines(keepends=True)) + +##### + +MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT = ( + """\ +def test(): + for i in range(1): + print(i) + res =! ls +""".splitlines( + keepends=True + ), + (4, 7), + """\ +def test(): + for i in range(1): + print(i) + res =get_ipython().getoutput(\' ls\') +""".splitlines( + keepends=True + ), +) + +###### + +AUTOCALL_QUOTE = ([",f 1 2 3\n"], (1, 0), ['f("1", "2", "3")\n']) + +AUTOCALL_QUOTE2 = ([";f 1 2 3\n"], (1, 0), ['f("1 2 3")\n']) + +AUTOCALL_PAREN = (["/f 1 2 3\n"], (1, 0), ["f(1, 2, 3)\n"]) + +SIMPLE_HELP = (["foo?\n"], (1, 0), ["get_ipython().run_line_magic('pinfo', 'foo')\n"]) + +DETAILED_HELP = ( + ["foo??\n"], + (1, 0), + ["get_ipython().run_line_magic('pinfo2', 'foo')\n"], +) + +MAGIC_HELP = (["%foo?\n"], (1, 0), ["get_ipython().run_line_magic('pinfo', '%foo')\n"]) + +HELP_IN_EXPR = ( + ["a = b + c?\n"], + (1, 0), + ["get_ipython().run_line_magic('pinfo', 'c')\n"], +) + +HELP_CONTINUED_LINE = ( + """\ +a = \\ +zip? +""".splitlines( + keepends=True + ), + (1, 0), + [r"get_ipython().run_line_magic('pinfo', 'zip')" + "\n"], +) + +HELP_MULTILINE = ( + """\ +(a, +b) = zip? +""".splitlines( + keepends=True + ), + (1, 0), + [r"get_ipython().run_line_magic('pinfo', 'zip')" + "\n"], +) + +HELP_UNICODE = ( + ["π.foo?\n"], + (1, 0), + ["get_ipython().run_line_magic('pinfo', 'π.foo')\n"], +) + + +def null_cleanup_transformer(lines): + """ + A cleanup transform that returns an empty list. 
+ """ + return [] + + +def test_check_make_token_by_line_never_ends_empty(): + """ + Check that not sequence of single or double characters ends up leading to en empty list of tokens + """ + from string import printable + + for c in printable: + assert make_tokens_by_line(c)[-1] != [] + for k in printable: + assert make_tokens_by_line(c + k)[-1] != [] + + +def check_find(transformer, case, match=True): + sample, expected_start, _ = case + tbl = make_tokens_by_line(sample) + res = transformer.find(tbl) + if match: + # start_line is stored 0-indexed, expected values are 1-indexed + assert (res.start_line + 1, res.start_col) == expected_start + return res + else: + assert res is None + + +def check_transform(transformer_cls, case): + lines, start, expected = case + transformer = transformer_cls(start) + assert transformer.transform(lines) == expected + + +def test_continued_line(): + lines = MULTILINE_MAGIC_ASSIGN[0] + assert ipt2.find_end_of_continued_line(lines, 1) == 2 + + assert ipt2.assemble_continued_line(lines, (1, 5), 2) == "foo bar" + + +def test_find_assign_magic(): + check_find(ipt2.MagicAssign, MULTILINE_MAGIC_ASSIGN) + check_find(ipt2.MagicAssign, MULTILINE_SYSTEM_ASSIGN, match=False) + check_find(ipt2.MagicAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT, match=False) + + +def test_transform_assign_magic(): + check_transform(ipt2.MagicAssign, MULTILINE_MAGIC_ASSIGN) + + +def test_find_assign_system(): + check_find(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN) + check_find(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT) + check_find(ipt2.SystemAssign, (["a = !ls\n"], (1, 5), None)) + check_find(ipt2.SystemAssign, (["a=!ls\n"], (1, 2), None)) + check_find(ipt2.SystemAssign, MULTILINE_MAGIC_ASSIGN, match=False) + + +def test_transform_assign_system(): + check_transform(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN) + check_transform(ipt2.SystemAssign, MULTILINE_SYSTEM_ASSIGN_AFTER_DEDENT) + + +def test_find_magic_escape(): + check_find(ipt2.EscapedCommand, MULTILINE_MAGIC) + check_find(ipt2.EscapedCommand, INDENTED_MAGIC) + check_find(ipt2.EscapedCommand, MULTILINE_MAGIC_ASSIGN, match=False) + + +def test_transform_magic_escape(): + check_transform(ipt2.EscapedCommand, MULTILINE_MAGIC) + check_transform(ipt2.EscapedCommand, INDENTED_MAGIC) + check_transform(ipt2.EscapedCommand, CRLF_MAGIC) + + +def test_find_autocalls(): + for case in [AUTOCALL_QUOTE, AUTOCALL_QUOTE2, AUTOCALL_PAREN]: + print("Testing %r" % case[0]) + check_find(ipt2.EscapedCommand, case) + + +def test_transform_autocall(): + for case in [AUTOCALL_QUOTE, AUTOCALL_QUOTE2, AUTOCALL_PAREN]: + print("Testing %r" % case[0]) + check_transform(ipt2.EscapedCommand, case) + + +def test_find_help(): + for case in [SIMPLE_HELP, DETAILED_HELP, MAGIC_HELP, HELP_IN_EXPR]: + check_find(ipt2.HelpEnd, case) + + tf = check_find(ipt2.HelpEnd, HELP_CONTINUED_LINE) + assert tf.q_line == 1 + assert tf.q_col == 3 + + tf = check_find(ipt2.HelpEnd, HELP_MULTILINE) + assert tf.q_line == 1 + assert tf.q_col == 8 + + # ? 
in a comment does not trigger help + check_find(ipt2.HelpEnd, (["foo # bar?\n"], None, None), match=False) + # Nor in a string + check_find(ipt2.HelpEnd, (["foo = '''bar?\n"], None, None), match=False) + + +def test_transform_help(): + tf = ipt2.HelpEnd((1, 0), (1, 9)) + assert tf.transform(HELP_IN_EXPR[0]) == HELP_IN_EXPR[2] + + tf = ipt2.HelpEnd((1, 0), (2, 3)) + assert tf.transform(HELP_CONTINUED_LINE[0]) == HELP_CONTINUED_LINE[2] + + tf = ipt2.HelpEnd((1, 0), (2, 8)) + assert tf.transform(HELP_MULTILINE[0]) == HELP_MULTILINE[2] + + tf = ipt2.HelpEnd((1, 0), (1, 0)) + assert tf.transform(HELP_UNICODE[0]) == HELP_UNICODE[2] + + +def test_find_assign_op_dedent(): + """ + be careful that empty token like dedent are not counted as parens + """ + + class Tk: + def __init__(self, s): + self.string = s + + assert _find_assign_op([Tk(s) for s in ("", "a", "=", "b")]) == 2 + assert ( + _find_assign_op([Tk(s) for s in ("", "(", "a", "=", "b", ")", "=", "5")]) == 6 + ) + + +examples = [ + pytest.param("a = 1", "complete", None), + pytest.param("for a in range(5):", "incomplete", 4), + pytest.param("for a in range(5):\n if a > 0:", "incomplete", 8), + pytest.param("raise = 2", "invalid", None), + pytest.param("a = [1,\n2,", "incomplete", 0), + pytest.param("(\n))", "incomplete", 0), + pytest.param("\\\r\n", "incomplete", 0), + pytest.param("a = '''\n hi", "incomplete", 3), + pytest.param("def a():\n x=1\n global x", "invalid", None), + pytest.param( + "a \\ ", + "invalid", + None, + marks=pytest.mark.xfail( + reason="Bug in python 3.9.8 – bpo 45738", + condition=sys.version_info + in [(3, 9, 8, "final", 0), (3, 11, 0, "alpha", 2)], + raises=SystemError, + strict=True, + ), + ), # Nothing allowed after backslash, + pytest.param("1\\\n+2", "complete", None), +] + + +@pytest.mark.parametrize("code, expected, number", examples) +def test_check_complete_param(code, expected, number): + cc = ipt2.TransformerManager().check_complete + assert cc(code) == (expected, number) + + +@pytest.mark.xfail(platform.python_implementation() == "PyPy", reason="fail on pypy") +@pytest.mark.xfail( + reason="Bug in python 3.9.8 – bpo 45738", + condition=sys.version_info in [(3, 9, 8, "final", 0), (3, 11, 0, "alpha", 2)], + raises=SystemError, + strict=True, +) +def test_check_complete(): + cc = ipt2.TransformerManager().check_complete + + example = dedent( + """ + if True: + a=1""" + ) + + assert cc(example) == ("incomplete", 4) + assert cc(example + "\n") == ("complete", None) + assert cc(example + "\n ") == ("complete", None) + + # no need to loop on all the letters/numbers. + short = "12abAB" + string.printable[62:] + for c in short: + # test does not raise: + cc(c) + for k in short: + cc(c + k) + + assert cc("def f():\n x=0\n \\\n ") == ("incomplete", 2) + + +@pytest.mark.xfail(platform.python_implementation() == "PyPy", reason="fail on pypy") +@pytest.mark.parametrize( + "value, expected", + [ + ('''def foo():\n """''', ("incomplete", 4)), + ("""async with example:\n pass""", ("incomplete", 4)), + ("""async with example:\n pass\n """, ("complete", None)), + ], +) +def test_check_complete_II(value, expected): + """ + Test that multiple line strings are properly handled. 
+ + Separate test function for convenience + + """ + cc = ipt2.TransformerManager().check_complete + assert cc(value) == expected + + +@pytest.mark.parametrize( + "value, expected", + [ + (")", ("invalid", None)), + ("]", ("invalid", None)), + ("}", ("invalid", None)), + (")(", ("invalid", None)), + ("][", ("invalid", None)), + ("}{", ("invalid", None)), + ("]()(", ("invalid", None)), + ("())(", ("invalid", None)), + (")[](", ("invalid", None)), + ("()](", ("invalid", None)), + ], +) +def test_check_complete_invalidates_sunken_brackets(value, expected): + """ + Test that a single line with more closing brackets than the opening ones is + interpreted as invalid + """ + cc = ipt2.TransformerManager().check_complete + assert cc(value) == expected + + +def test_null_cleanup_transformer(): + manager = ipt2.TransformerManager() + manager.cleanup_transforms.insert(0, null_cleanup_transformer) + assert manager.transform_cell("") == "" + + +def test_side_effects_I(): + count = 0 + + def counter(lines): + nonlocal count + count += 1 + return lines + + counter.has_side_effects = True + + manager = ipt2.TransformerManager() + manager.cleanup_transforms.insert(0, counter) + assert manager.check_complete("a=1\n") == ("complete", None) + assert count == 0 + + +def test_side_effects_II(): + count = 0 + + def counter(lines): + nonlocal count + count += 1 + return lines + + counter.has_side_effects = True + + manager = ipt2.TransformerManager() + manager.line_transforms.insert(0, counter) + assert manager.check_complete("b=1\n") == ("complete", None) + assert count == 0 diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2_line.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2_line.py new file mode 100644 index 000000000..ec7a87364 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_inputtransformer2_line.py @@ -0,0 +1,167 @@ +"""Tests for the line-based transformers in IPython.core.inputtransformer2 + +Line-based transformers are the simpler ones; token-based transformers are +more complex. See test_inputtransformer2 for tests for token-based transformers. +""" + +from IPython.core import inputtransformer2 as ipt2 + +CELL_MAGIC = ("""\ +%%foo arg +body 1 +body 2 +""", """\ +get_ipython().run_cell_magic('foo', 'arg', 'body 1\\nbody 2\\n') +""") + +def test_cell_magic(): + for sample, expected in [CELL_MAGIC]: + assert ipt2.cell_magic(sample.splitlines(keepends=True)) == expected.splitlines( + keepends=True + ) + +CLASSIC_PROMPT = ("""\ +>>> for a in range(5): +... print(a) +""", """\ +for a in range(5): + print(a) +""") + +CLASSIC_PROMPT_L2 = ("""\ +for a in range(5): +... print(a) +... 
print(a ** 2) +""", """\ +for a in range(5): + print(a) + print(a ** 2) +""") + +def test_classic_prompt(): + for sample, expected in [CLASSIC_PROMPT, CLASSIC_PROMPT_L2]: + assert ipt2.classic_prompt( + sample.splitlines(keepends=True) + ) == expected.splitlines(keepends=True) + +IPYTHON_PROMPT = ("""\ +In [1]: for a in range(5): + ...: print(a) +""", """\ +for a in range(5): + print(a) +""") + +IPYTHON_PROMPT_L2 = ("""\ +for a in range(5): + ...: print(a) + ...: print(a ** 2) +""", """\ +for a in range(5): + print(a) + print(a ** 2) +""") + + +IPYTHON_PROMPT_VI_INS = ( + """\ +[ins] In [11]: def a(): + ...: 123 + ...: + ...: 123 +""", + """\ +def a(): + 123 + +123 +""", +) + +IPYTHON_PROMPT_VI_NAV = ( + """\ +[nav] In [11]: def a(): + ...: 123 + ...: + ...: 123 +""", + """\ +def a(): + 123 + +123 +""", +) + + +def test_ipython_prompt(): + for sample, expected in [ + IPYTHON_PROMPT, + IPYTHON_PROMPT_L2, + IPYTHON_PROMPT_VI_INS, + IPYTHON_PROMPT_VI_NAV, + ]: + assert ipt2.ipython_prompt( + sample.splitlines(keepends=True) + ) == expected.splitlines(keepends=True) + + +INDENT_SPACES = ("""\ + if True: + a = 3 +""", """\ +if True: + a = 3 +""") + +INDENT_TABS = ("""\ +\tif True: +\t\tb = 4 +""", """\ +if True: +\tb = 4 +""") + +def test_leading_indent(): + for sample, expected in [INDENT_SPACES, INDENT_TABS]: + assert ipt2.leading_indent( + sample.splitlines(keepends=True) + ) == expected.splitlines(keepends=True) + +LEADING_EMPTY_LINES = ("""\ + \t + +if True: + a = 3 + +b = 4 +""", """\ +if True: + a = 3 + +b = 4 +""") + +ONLY_EMPTY_LINES = ("""\ + \t + +""", """\ + \t + +""") + +def test_leading_empty_lines(): + for sample, expected in [LEADING_EMPTY_LINES, ONLY_EMPTY_LINES]: + assert ipt2.leading_empty_lines( + sample.splitlines(keepends=True) + ) == expected.splitlines(keepends=True) + +CRLF_MAGIC = ([ + "%%ls\r\n" +], [ + "get_ipython().run_cell_magic('ls', '', '')\n" +]) + +def test_crlf_magic(): + for sample, expected in [CRLF_MAGIC]: + assert ipt2.cell_magic(sample) == expected diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_interactiveshell.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_interactiveshell.py new file mode 100644 index 000000000..982bd5a3b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_interactiveshell.py @@ -0,0 +1,1112 @@ +# -*- coding: utf-8 -*- +"""Tests for the key interactiveshell module. + +Historically the main classes in interactiveshell have been under-tested. This +module should grow as many single-method tests as possible to trap many of the +recurring bugs we seem to encounter with high-level interaction. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. 
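+#
+# Note: the bare ``ip`` used throughout this module is not defined here;
+# IPython's test harness injects the running shell instance into the test
+# namespace (see IPython.testing.globalipapp), so these tests assume that
+# machinery is in place.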
+ +import asyncio +import ast +import os +import signal +import shutil +import sys +import tempfile +import unittest +from unittest import mock + +from os.path import join + +from IPython.core.error import InputRejected +from IPython.core.inputtransformer import InputTransformer +from IPython.core import interactiveshell +from IPython.testing.decorators import ( + skipif, skip_win32, onlyif_unicode_paths, onlyif_cmds_exist, +) +from IPython.testing import tools as tt +from IPython.utils.process import find_cmd + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- +# This is used by every single test, no point repeating it ad nauseam + +#----------------------------------------------------------------------------- +# Tests +#----------------------------------------------------------------------------- + +class DerivedInterrupt(KeyboardInterrupt): + pass + +class InteractiveShellTestCase(unittest.TestCase): + def test_naked_string_cells(self): + """Test that cells with only naked strings are fully executed""" + # First, single-line inputs + ip.run_cell('"a"\n') + self.assertEqual(ip.user_ns['_'], 'a') + # And also multi-line cells + ip.run_cell('"""a\nb"""\n') + self.assertEqual(ip.user_ns['_'], 'a\nb') + + def test_run_empty_cell(self): + """Just make sure we don't get a horrible error with a blank + cell of input. Yes, I did overlook that.""" + old_xc = ip.execution_count + res = ip.run_cell('') + self.assertEqual(ip.execution_count, old_xc) + self.assertEqual(res.execution_count, None) + + def test_run_cell_multiline(self): + """Multi-block, multi-line cells must execute correctly. + """ + src = '\n'.join(["x=1", + "y=2", + "if 1:", + " x += 1", + " y += 1",]) + res = ip.run_cell(src) + self.assertEqual(ip.user_ns['x'], 2) + self.assertEqual(ip.user_ns['y'], 3) + self.assertEqual(res.success, True) + self.assertEqual(res.result, None) + + def test_multiline_string_cells(self): + "Code sprinkled with multiline strings should execute (GH-306)" + ip.run_cell('tmp=0') + self.assertEqual(ip.user_ns['tmp'], 0) + res = ip.run_cell('tmp=1;"""a\nb"""\n') + self.assertEqual(ip.user_ns['tmp'], 1) + self.assertEqual(res.success, True) + self.assertEqual(res.result, "a\nb") + + def test_dont_cache_with_semicolon(self): + "Ending a line with semicolon should not cache the returned object (GH-307)" + oldlen = len(ip.user_ns['Out']) + for cell in ['1;', '1;1;']: + res = ip.run_cell(cell, store_history=True) + newlen = len(ip.user_ns['Out']) + self.assertEqual(oldlen, newlen) + self.assertIsNone(res.result) + i = 0 + #also test the default caching behavior + for cell in ['1', '1;1']: + ip.run_cell(cell, store_history=True) + newlen = len(ip.user_ns['Out']) + i += 1 + self.assertEqual(oldlen+i, newlen) + + def test_syntax_error(self): + res = ip.run_cell("raise = 3") + self.assertIsInstance(res.error_before_exec, SyntaxError) + + def test_open_standard_input_stream(self): + res = ip.run_cell("open(0)") + self.assertIsInstance(res.error_in_exec, ValueError) + + def test_open_standard_output_stream(self): + res = ip.run_cell("open(1)") + self.assertIsInstance(res.error_in_exec, ValueError) + + def test_open_standard_error_stream(self): + res = ip.run_cell("open(2)") + self.assertIsInstance(res.error_in_exec, ValueError) + + def test_In_variable(self): + "Verify that In variable grows with user input (GH-284)" + oldlen = len(ip.user_ns['In']) + ip.run_cell('1;', store_history=True) + newlen = 
len(ip.user_ns['In']) + self.assertEqual(oldlen+1, newlen) + self.assertEqual(ip.user_ns['In'][-1],'1;') + + def test_magic_names_in_string(self): + ip.run_cell('a = """\n%exit\n"""') + self.assertEqual(ip.user_ns['a'], '\n%exit\n') + + def test_trailing_newline(self): + """test that running !(command) does not raise a SyntaxError""" + ip.run_cell('!(true)\n', False) + ip.run_cell('!(true)\n\n\n', False) + + def test_gh_597(self): + """Pretty-printing lists of objects with non-ascii reprs may cause + problems.""" + class Spam(object): + def __repr__(self): + return "\xe9"*50 + import IPython.core.formatters + f = IPython.core.formatters.PlainTextFormatter() + f([Spam(),Spam()]) + + + def test_future_flags(self): + """Check that future flags are used for parsing code (gh-777)""" + ip.run_cell('from __future__ import barry_as_FLUFL') + try: + ip.run_cell('prfunc_return_val = 1 <> 2') + assert 'prfunc_return_val' in ip.user_ns + finally: + # Reset compiler flags so we don't mess up other tests. + ip.compile.reset_compiler_flags() + + def test_can_pickle(self): + "Can we pickle objects defined interactively (GH-29)" + ip = get_ipython() + ip.reset() + ip.run_cell(("class Mylist(list):\n" + " def __init__(self,x=[]):\n" + " list.__init__(self,x)")) + ip.run_cell("w=Mylist([1,2,3])") + + from pickle import dumps + + # We need to swap in our main module - this is only necessary + # inside the test framework, because IPython puts the interactive module + # in place (but the test framework undoes this). + _main = sys.modules['__main__'] + sys.modules['__main__'] = ip.user_module + try: + res = dumps(ip.user_ns["w"]) + finally: + sys.modules['__main__'] = _main + self.assertTrue(isinstance(res, bytes)) + + def test_global_ns(self): + "Code in functions must be able to access variables outside them." 
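+        # run_cell executes code in ip.user_ns, which also serves as the
+        # globals of interactively defined functions, so f() below can
+        # read the interactively assigned `a`.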
+ ip = get_ipython() + ip.run_cell("a = 10") + ip.run_cell(("def f(x):\n" + " return x + a")) + ip.run_cell("b = f(12)") + self.assertEqual(ip.user_ns["b"], 22) + + def test_bad_custom_tb(self): + """Check that InteractiveShell is protected from bad custom exception handlers""" + ip.set_custom_exc((IOError,), lambda etype,value,tb: 1/0) + self.assertEqual(ip.custom_exceptions, (IOError,)) + with tt.AssertPrints("Custom TB Handler failed", channel='stderr'): + ip.run_cell(u'raise IOError("foo")') + self.assertEqual(ip.custom_exceptions, ()) + + def test_bad_custom_tb_return(self): + """Check that InteractiveShell is protected from bad return types in custom exception handlers""" + ip.set_custom_exc((NameError,),lambda etype,value,tb, tb_offset=None: 1) + self.assertEqual(ip.custom_exceptions, (NameError,)) + with tt.AssertPrints("Custom TB Handler failed", channel='stderr'): + ip.run_cell(u'a=abracadabra') + self.assertEqual(ip.custom_exceptions, ()) + + def test_drop_by_id(self): + myvars = {"a":object(), "b":object(), "c": object()} + ip.push(myvars, interactive=False) + for name in myvars: + assert name in ip.user_ns, name + assert name in ip.user_ns_hidden, name + ip.user_ns['b'] = 12 + ip.drop_by_id(myvars) + for name in ["a", "c"]: + assert name not in ip.user_ns, name + assert name not in ip.user_ns_hidden, name + assert ip.user_ns['b'] == 12 + ip.reset() + + def test_var_expand(self): + ip.user_ns['f'] = u'Ca\xf1o' + self.assertEqual(ip.var_expand(u'echo $f'), u'echo Ca\xf1o') + self.assertEqual(ip.var_expand(u'echo {f}'), u'echo Ca\xf1o') + self.assertEqual(ip.var_expand(u'echo {f[:-1]}'), u'echo Ca\xf1') + self.assertEqual(ip.var_expand(u'echo {1*2}'), u'echo 2') + + self.assertEqual(ip.var_expand(u"grep x | awk '{print $1}'"), u"grep x | awk '{print $1}'") + + ip.user_ns['f'] = b'Ca\xc3\xb1o' + # This should not raise any exception: + ip.var_expand(u'echo $f') + + def test_var_expand_local(self): + """Test local variable expansion in !system and %magic calls""" + # !system + ip.run_cell( + "def test():\n" + ' lvar = "ttt"\n' + " ret = !echo {lvar}\n" + " return ret[0]\n" + ) + res = ip.user_ns["test"]() + self.assertIn("ttt", res) + + # %magic + ip.run_cell( + "def makemacro():\n" + ' macroname = "macro_var_expand_locals"\n' + " %macro {macroname} codestr\n" + ) + ip.user_ns["codestr"] = "str(12)" + ip.run_cell("makemacro()") + self.assertIn("macro_var_expand_locals", ip.user_ns) + + def test_var_expand_self(self): + """Test variable expansion with the name 'self', which was failing. 
+ + See https://github.com/ipython/ipython/issues/1878#issuecomment-7698218 + """ + ip.run_cell( + "class cTest:\n" + ' classvar="see me"\n' + " def test(self):\n" + " res = !echo Variable: {self.classvar}\n" + " return res[0]\n" + ) + self.assertIn("see me", ip.user_ns["cTest"]().test()) + + def test_bad_var_expand(self): + """var_expand on invalid formats shouldn't raise""" + # SyntaxError + self.assertEqual(ip.var_expand(u"{'a':5}"), u"{'a':5}") + # NameError + self.assertEqual(ip.var_expand(u"{asdf}"), u"{asdf}") + # ZeroDivisionError + self.assertEqual(ip.var_expand(u"{1/0}"), u"{1/0}") + + def test_silent_postexec(self): + """run_cell(silent=True) doesn't invoke pre/post_run_cell callbacks""" + pre_explicit = mock.Mock() + pre_always = mock.Mock() + post_explicit = mock.Mock() + post_always = mock.Mock() + all_mocks = [pre_explicit, pre_always, post_explicit, post_always] + + ip.events.register('pre_run_cell', pre_explicit) + ip.events.register('pre_execute', pre_always) + ip.events.register('post_run_cell', post_explicit) + ip.events.register('post_execute', post_always) + + try: + ip.run_cell("1", silent=True) + assert pre_always.called + assert not pre_explicit.called + assert post_always.called + assert not post_explicit.called + # double-check that non-silent exec did what we expected + # silent to avoid + ip.run_cell("1") + assert pre_explicit.called + assert post_explicit.called + info, = pre_explicit.call_args[0] + result, = post_explicit.call_args[0] + self.assertEqual(info, result.info) + # check that post hooks are always called + [m.reset_mock() for m in all_mocks] + ip.run_cell("syntax error") + assert pre_always.called + assert pre_explicit.called + assert post_always.called + assert post_explicit.called + info, = pre_explicit.call_args[0] + result, = post_explicit.call_args[0] + self.assertEqual(info, result.info) + finally: + # remove post-exec + ip.events.unregister('pre_run_cell', pre_explicit) + ip.events.unregister('pre_execute', pre_always) + ip.events.unregister('post_run_cell', post_explicit) + ip.events.unregister('post_execute', post_always) + + def test_silent_noadvance(self): + """run_cell(silent=True) doesn't advance execution_count""" + ec = ip.execution_count + # silent should force store_history=False + ip.run_cell("1", store_history=True, silent=True) + + self.assertEqual(ec, ip.execution_count) + # double-check that non-silent exec did what we expected + # silent to avoid + ip.run_cell("1", store_history=True) + self.assertEqual(ec+1, ip.execution_count) + + def test_silent_nodisplayhook(self): + """run_cell(silent=True) doesn't trigger displayhook""" + d = dict(called=False) + + trap = ip.display_trap + save_hook = trap.hook + + def failing_hook(*args, **kwargs): + d['called'] = True + + try: + trap.hook = failing_hook + res = ip.run_cell("1", silent=True) + self.assertFalse(d['called']) + self.assertIsNone(res.result) + # double-check that non-silent exec did what we expected + # silent to avoid + ip.run_cell("1") + self.assertTrue(d['called']) + finally: + trap.hook = save_hook + + def test_ofind_line_magic(self): + from IPython.core.magic import register_line_magic + + @register_line_magic + def lmagic(line): + "A line magic" + + # Get info on line magic + lfind = ip._ofind("lmagic") + info = dict( + found=True, + isalias=False, + ismagic=True, + namespace="IPython internal", + obj=lmagic, + parent=None, + ) + self.assertEqual(lfind, info) + + def test_ofind_cell_magic(self): + from IPython.core.magic import register_cell_magic + + 
@register_cell_magic + def cmagic(line, cell): + "A cell magic" + + # Get info on cell magic + find = ip._ofind("cmagic") + info = dict( + found=True, + isalias=False, + ismagic=True, + namespace="IPython internal", + obj=cmagic, + parent=None, + ) + self.assertEqual(find, info) + + def test_ofind_property_with_error(self): + class A(object): + @property + def foo(self): + raise NotImplementedError() # pragma: no cover + + a = A() + + found = ip._ofind('a.foo', [('locals', locals())]) + info = dict(found=True, isalias=False, ismagic=False, + namespace='locals', obj=A.foo, parent=a) + self.assertEqual(found, info) + + def test_ofind_multiple_attribute_lookups(self): + class A(object): + @property + def foo(self): + raise NotImplementedError() # pragma: no cover + + a = A() + a.a = A() + a.a.a = A() + + found = ip._ofind('a.a.a.foo', [('locals', locals())]) + info = dict(found=True, isalias=False, ismagic=False, + namespace='locals', obj=A.foo, parent=a.a.a) + self.assertEqual(found, info) + + def test_ofind_slotted_attributes(self): + class A(object): + __slots__ = ['foo'] + def __init__(self): + self.foo = 'bar' + + a = A() + found = ip._ofind('a.foo', [('locals', locals())]) + info = dict(found=True, isalias=False, ismagic=False, + namespace='locals', obj=a.foo, parent=a) + self.assertEqual(found, info) + + found = ip._ofind('a.bar', [('locals', locals())]) + info = dict(found=False, isalias=False, ismagic=False, + namespace=None, obj=None, parent=a) + self.assertEqual(found, info) + + def test_ofind_prefers_property_to_instance_level_attribute(self): + class A(object): + @property + def foo(self): + return 'bar' + a = A() + a.__dict__["foo"] = "baz" + self.assertEqual(a.foo, "bar") + found = ip._ofind("a.foo", [("locals", locals())]) + self.assertIs(found["obj"], A.foo) + + def test_custom_syntaxerror_exception(self): + called = [] + def my_handler(shell, etype, value, tb, tb_offset=None): + called.append(etype) + shell.showtraceback((etype, value, tb), tb_offset=tb_offset) + + ip.set_custom_exc((SyntaxError,), my_handler) + try: + ip.run_cell("1f") + # Check that this was called, and only once. + self.assertEqual(called, [SyntaxError]) + finally: + # Reset the custom exception hook + ip.set_custom_exc((), None) + + def test_custom_exception(self): + called = [] + def my_handler(shell, etype, value, tb, tb_offset=None): + called.append(etype) + shell.showtraceback((etype, value, tb), tb_offset=tb_offset) + + ip.set_custom_exc((ValueError,), my_handler) + try: + res = ip.run_cell("raise ValueError('test')") + # Check that this was called, and only once. + self.assertEqual(called, [ValueError]) + # Check that the error is on the result object + self.assertIsInstance(res.error_in_exec, ValueError) + finally: + # Reset the custom exception hook + ip.set_custom_exc((), None) + + @mock.patch("builtins.print") + def test_showtraceback_with_surrogates(self, mocked_print): + values = [] + + def mock_print_func(value, sep=" ", end="\n", file=sys.stdout, flush=False): + values.append(value) + if value == chr(0xD8FF): + raise UnicodeEncodeError("utf-8", chr(0xD8FF), 0, 1, "") + + # mock builtins.print + mocked_print.side_effect = mock_print_func + + # ip._showtraceback() is replaced in globalipapp.py. + # Call original method to test. 
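+        # chr(0xD8FF) is a lone surrogate: the mocked print raises
+        # UnicodeEncodeError for it, so _showtraceback should retry with the
+        # backslash-escaped form "\ud8ff", giving two print calls in total.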
+ interactiveshell.InteractiveShell._showtraceback(ip, None, None, chr(0xD8FF)) + + self.assertEqual(mocked_print.call_count, 2) + self.assertEqual(values, [chr(0xD8FF), "\\ud8ff"]) + + def test_mktempfile(self): + filename = ip.mktempfile() + # Check that we can open the file again on Windows + with open(filename, "w", encoding="utf-8") as f: + f.write("abc") + + filename = ip.mktempfile(data="blah") + with open(filename, "r", encoding="utf-8") as f: + self.assertEqual(f.read(), "blah") + + def test_new_main_mod(self): + # Smoketest to check that this accepts a unicode module name + name = u'jiefmw' + mod = ip.new_main_mod(u'%s.py' % name, name) + self.assertEqual(mod.__name__, name) + + def test_get_exception_only(self): + try: + raise KeyboardInterrupt + except KeyboardInterrupt: + msg = ip.get_exception_only() + self.assertEqual(msg, 'KeyboardInterrupt\n') + + try: + raise DerivedInterrupt("foo") + except KeyboardInterrupt: + msg = ip.get_exception_only() + self.assertEqual(msg, 'IPython.core.tests.test_interactiveshell.DerivedInterrupt: foo\n') + + def test_inspect_text(self): + ip.run_cell('a = 5') + text = ip.object_inspect_text('a') + self.assertIsInstance(text, str) + + def test_last_execution_result(self): + """ Check that last execution result gets set correctly (GH-10702) """ + result = ip.run_cell('a = 5; a') + self.assertTrue(ip.last_execution_succeeded) + self.assertEqual(ip.last_execution_result.result, 5) + + result = ip.run_cell('a = x_invalid_id_x') + self.assertFalse(ip.last_execution_succeeded) + self.assertFalse(ip.last_execution_result.success) + self.assertIsInstance(ip.last_execution_result.error_in_exec, NameError) + + def test_reset_aliasing(self): + """ Check that standard posix aliases work after %reset. """ + if os.name != 'posix': + return + + ip.reset() + for cmd in ('clear', 'more', 'less', 'man'): + res = ip.run_cell('%' + cmd) + self.assertEqual(res.success, True) + + +class TestSafeExecfileNonAsciiPath(unittest.TestCase): + + @onlyif_unicode_paths + def setUp(self): + self.BASETESTDIR = tempfile.mkdtemp() + self.TESTDIR = join(self.BASETESTDIR, u"åäö") + os.mkdir(self.TESTDIR) + with open( + join(self.TESTDIR, "åäötestscript.py"), "w", encoding="utf-8" + ) as sfile: + sfile.write("pass\n") + self.oldpath = os.getcwd() + os.chdir(self.TESTDIR) + self.fname = u"åäötestscript.py" + + def tearDown(self): + os.chdir(self.oldpath) + shutil.rmtree(self.BASETESTDIR) + + @onlyif_unicode_paths + def test_1(self): + """Test safe_execfile with non-ascii path + """ + ip.safe_execfile(self.fname, {}, raise_exceptions=True) + +class ExitCodeChecks(tt.TempFileMixin): + + def setUp(self): + self.system = ip.system_raw + + def test_exit_code_ok(self): + self.system('exit 0') + self.assertEqual(ip.user_ns['_exit_code'], 0) + + def test_exit_code_error(self): + self.system('exit 1') + self.assertEqual(ip.user_ns['_exit_code'], 1) + + @skipif(not hasattr(signal, 'SIGALRM')) + def test_exit_code_signal(self): + self.mktmp("import signal, time\n" + "signal.setitimer(signal.ITIMER_REAL, 0.1)\n" + "time.sleep(1)\n") + self.system("%s %s" % (sys.executable, self.fname)) + self.assertEqual(ip.user_ns['_exit_code'], -signal.SIGALRM) + + @onlyif_cmds_exist("csh") + def test_exit_code_signal_csh(self): # pragma: no cover + SHELL = os.environ.get("SHELL", None) + os.environ["SHELL"] = find_cmd("csh") + try: + self.test_exit_code_signal() + finally: + if SHELL is not None: + os.environ['SHELL'] = SHELL + else: + del os.environ['SHELL'] + + +class TestSystemRaw(ExitCodeChecks): + + def 
setUp(self):
+        super().setUp()
+        self.system = ip.system_raw
+
+    @onlyif_unicode_paths
+    def test_1(self):
+        """Test system_raw with non-ascii cmd
+        """
+        cmd = u'''python -c "'åäö'" '''
+        ip.system_raw(cmd)
+
+    @mock.patch('subprocess.call', side_effect=KeyboardInterrupt)
+    @mock.patch('os.system', side_effect=KeyboardInterrupt)
+    def test_control_c(self, *mocks):
+        try:
+            self.system("sleep 1 # won't happen")
+        except KeyboardInterrupt: # pragma: no cover
+            self.fail(
+                "system call should intercept "
+                "keyboard interrupt from subprocess.call"
+            )
+        self.assertEqual(ip.user_ns["_exit_code"], -signal.SIGINT)
+
+    def test_magic_warnings(self):
+        for magic_cmd in ("pip", "conda", "cd"):
+            with self.assertWarnsRegex(Warning, "You executed the system command"):
+                ip.system_raw(magic_cmd)
+
+# TODO: Exit codes are currently ignored on Windows.
+class TestSystemPipedExitCode(ExitCodeChecks):
+
+    def setUp(self):
+        super().setUp()
+        self.system = ip.system_piped
+
+    @skip_win32
+    def test_exit_code_ok(self):
+        ExitCodeChecks.test_exit_code_ok(self)
+
+    @skip_win32
+    def test_exit_code_error(self):
+        ExitCodeChecks.test_exit_code_error(self)
+
+    @skip_win32
+    def test_exit_code_signal(self):
+        ExitCodeChecks.test_exit_code_signal(self)
+
+class TestModules(tt.TempFileMixin):
+    def test_extraneous_loads(self):
+        """Test we're not loading modules on startup that we shouldn't.
+        """
+        self.mktmp("import sys\n"
+                   "print('numpy' in sys.modules)\n"
+                   "print('ipyparallel' in sys.modules)\n"
+                   "print('ipykernel' in sys.modules)\n"
+                   )
+        out = "False\nFalse\nFalse\n"
+        tt.ipexec_validate(self.fname, out)
+
+class Negator(ast.NodeTransformer):
+    """Negates all number literals in an AST."""
+
+    # for python 3.7 and earlier
+    def visit_Num(self, node):
+        node.n = -node.n
+        return node
+
+    # for python 3.8+
+    def visit_Constant(self, node):
+        if isinstance(node.value, int):
+            return self.visit_Num(node)
+        return node
+
+class TestAstTransform(unittest.TestCase):
+    def setUp(self):
+        self.negator = Negator()
+        ip.ast_transformers.append(self.negator)
+
+    def tearDown(self):
+        ip.ast_transformers.remove(self.negator)
+
+    def test_non_int_const(self):
+        with tt.AssertPrints("hello"):
+            ip.run_cell('print("hello")')
+
+    def test_run_cell(self):
+        with tt.AssertPrints("-34"):
+            ip.run_cell("print(12 + 22)")
+
+        # A named reference to a number shouldn't be transformed.
+        ip.user_ns["n"] = 55
+        with tt.AssertNotPrints("-55"):
+            ip.run_cell("print(n)")
+
+    def test_timeit(self):
+        called = set()
+        def f(x):
+            called.add(x)
+        ip.push({'f':f})
+
+        with tt.AssertPrints("std. dev. of"):
+            ip.run_line_magic("timeit", "-n1 f(1)")
+        self.assertEqual(called, {-1})
+        called.clear()
+
+        with tt.AssertPrints("std. dev. of"):
+            ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
+        self.assertEqual(called, {-2, -3})
+
+    def test_time(self):
+        called = []
+        def f(x):
+            called.append(x)
+        ip.push({'f':f})
+
+        # Test with an expression
+        with tt.AssertPrints("Wall time: "):
+            ip.run_line_magic("time", "f(5+9)")
+        self.assertEqual(called, [-14])
+        called[:] = []
+
+        # Test with a statement (different code path)
+        with tt.AssertPrints("Wall time: "):
+            ip.run_line_magic("time", "a = f(-3 + -2)")
+        self.assertEqual(called, [5])
+
+    def test_macro(self):
+        ip.push({'a':10})
+        # The AST transformation makes this do a+=-1
+        ip.define_macro("amacro", "a+=1\nprint(a)")
+
+        with tt.AssertPrints("9"):
+            ip.run_cell("amacro")
+        with tt.AssertPrints("8"):
+            ip.run_cell("amacro")
+
+class TestMiscTransform(unittest.TestCase):
+
+    def test_transform_only_once(self):
+        cleanup = 0
+        line_t = 0
+        def count_cleanup(lines):
+            nonlocal cleanup
+            cleanup += 1
+            return lines
+
+        def count_line_t(lines):
+            nonlocal line_t
+            line_t += 1
+            return lines
+
+        ip.input_transformer_manager.cleanup_transforms.append(count_cleanup)
+        ip.input_transformer_manager.line_transforms.append(count_line_t)
+
+        ip.run_cell('1')
+
+        assert cleanup == 1
+        assert line_t == 1
+
+class IntegerWrapper(ast.NodeTransformer):
+    """Wraps all integers in a call to Integer()"""
+
+    # for Python 3.7 and earlier
+    def visit_Num(self, node):
+        if isinstance(node.n, int):
+            return ast.Call(func=ast.Name(id='Integer', ctx=ast.Load()),
+                            args=[node], keywords=[])
+        return node
+
+    # For Python 3.8+
+    def visit_Constant(self, node):
+        if isinstance(node.value, int):
+            return self.visit_Num(node)
+        return node
+
+
+class TestAstTransform2(unittest.TestCase):
+    def setUp(self):
+        self.intwrapper = IntegerWrapper()
+        ip.ast_transformers.append(self.intwrapper)
+
+        self.calls = []
+        def Integer(*args):
+            self.calls.append(args)
+            return args
+        ip.push({"Integer": Integer})
+
+    def tearDown(self):
+        ip.ast_transformers.remove(self.intwrapper)
+        del ip.user_ns['Integer']
+
+    def test_run_cell(self):
+        ip.run_cell("n = 2")
+        self.assertEqual(self.calls, [(2,)])
+
+        # This shouldn't throw an error
+        ip.run_cell("o = 2.0")
+        self.assertEqual(ip.user_ns['o'], 2.0)
+
+    def test_run_cell_non_int(self):
+        ip.run_cell("n = 'a'")
+        assert self.calls == []
+
+    def test_timeit(self):
+        called = set()
+        def f(x):
+            called.add(x)
+        ip.push({'f':f})
+
+        with tt.AssertPrints("std. dev. of"):
+            ip.run_line_magic("timeit", "-n1 f(1)")
+        self.assertEqual(called, {(1,)})
+        called.clear()
+
+        with tt.AssertPrints("std. dev. of"):
+            ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
+        self.assertEqual(called, {(2,), (3,)})
+
+class ErrorTransformer(ast.NodeTransformer):
+    """Throws an error when it sees a number."""
+
+    def visit_Constant(self, node):
+        if isinstance(node.value, int):
+            raise ValueError("test")
+        return node
+
+
+class TestAstTransformError(unittest.TestCase):
+    def test_unregistering(self):
+        err_transformer = ErrorTransformer()
+        ip.ast_transformers.append(err_transformer)
+
+        with self.assertWarnsRegex(UserWarning, "It will be unregistered"):
+            ip.run_cell("1 + 2")
+
+        # This should have been removed.
+        self.assertNotIn(err_transformer, ip.ast_transformers)
+
+
+class StringRejector(ast.NodeTransformer):
+    """Throws an InputRejected when it sees a string literal.
+
+    Used to verify that NodeTransformers can signal that a piece of code should
+    not be executed by throwing an InputRejected.
+ """ + + # 3.8 only + def visit_Constant(self, node): + if isinstance(node.value, str): + raise InputRejected("test") + return node + + +class TestAstTransformInputRejection(unittest.TestCase): + + def setUp(self): + self.transformer = StringRejector() + ip.ast_transformers.append(self.transformer) + + def tearDown(self): + ip.ast_transformers.remove(self.transformer) + + def test_input_rejection(self): + """Check that NodeTransformers can reject input.""" + + expect_exception_tb = tt.AssertPrints("InputRejected: test") + expect_no_cell_output = tt.AssertNotPrints("'unsafe'", suppress=False) + + # Run the same check twice to verify that the transformer is not + # disabled after raising. + with expect_exception_tb, expect_no_cell_output: + ip.run_cell("'unsafe'") + + with expect_exception_tb, expect_no_cell_output: + res = ip.run_cell("'unsafe'") + + self.assertIsInstance(res.error_before_exec, InputRejected) + +def test__IPYTHON__(): + # This shouldn't raise a NameError, that's all + __IPYTHON__ + + +class DummyRepr(object): + def __repr__(self): + return "DummyRepr" + + def _repr_html_(self): + return "dummy" + + def _repr_javascript_(self): + return "console.log('hi');", {'key': 'value'} + + +def test_user_variables(): + # enable all formatters + ip.display_formatter.active_types = ip.display_formatter.format_types + + ip.user_ns['dummy'] = d = DummyRepr() + keys = {'dummy', 'doesnotexist'} + r = ip.user_expressions({ key:key for key in keys}) + + assert keys == set(r.keys()) + dummy = r["dummy"] + assert {"status", "data", "metadata"} == set(dummy.keys()) + assert dummy["status"] == "ok" + data = dummy["data"] + metadata = dummy["metadata"] + assert data.get("text/html") == d._repr_html_() + js, jsmd = d._repr_javascript_() + assert data.get("application/javascript") == js + assert metadata.get("application/javascript") == jsmd + + dne = r["doesnotexist"] + assert dne["status"] == "error" + assert dne["ename"] == "NameError" + + # back to text only + ip.display_formatter.active_types = ['text/plain'] + +def test_user_expression(): + # enable all formatters + ip.display_formatter.active_types = ip.display_formatter.format_types + query = { + 'a' : '1 + 2', + 'b' : '1/0', + } + r = ip.user_expressions(query) + import pprint + pprint.pprint(r) + assert set(r.keys()) == set(query.keys()) + a = r["a"] + assert {"status", "data", "metadata"} == set(a.keys()) + assert a["status"] == "ok" + data = a["data"] + metadata = a["metadata"] + assert data.get("text/plain") == "3" + + b = r["b"] + assert b["status"] == "error" + assert b["ename"] == "ZeroDivisionError" + + # back to text only + ip.display_formatter.active_types = ['text/plain'] + + +class TestSyntaxErrorTransformer(unittest.TestCase): + """Check that SyntaxError raised by an input transformer is handled by run_cell()""" + + @staticmethod + def transformer(lines): + for line in lines: + pos = line.find('syntaxerror') + if pos >= 0: + e = SyntaxError('input contains "syntaxerror"') + e.text = line + e.offset = pos + 1 + raise e + return lines + + def setUp(self): + ip.input_transformers_post.append(self.transformer) + + def tearDown(self): + ip.input_transformers_post.remove(self.transformer) + + def test_syntaxerror_input_transformer(self): + with tt.AssertPrints('1234'): + ip.run_cell('1234') + with tt.AssertPrints('SyntaxError: invalid syntax'): + ip.run_cell('1 2 3') # plain python syntax error + with tt.AssertPrints('SyntaxError: input contains "syntaxerror"'): + ip.run_cell('2345 # syntaxerror') # input transformer syntax error + 
with tt.AssertPrints('3456'): + ip.run_cell('3456') + + +class TestWarningSuppression(unittest.TestCase): + def test_warning_suppression(self): + ip.run_cell("import warnings") + try: + with self.assertWarnsRegex(UserWarning, "asdf"): + ip.run_cell("warnings.warn('asdf')") + # Here's the real test -- if we run that again, we should get the + # warning again. Traditionally, each warning was only issued once per + # IPython session (approximately), even if the user typed in new and + # different code that should have also triggered the warning, leading + # to much confusion. + with self.assertWarnsRegex(UserWarning, "asdf"): + ip.run_cell("warnings.warn('asdf')") + finally: + ip.run_cell("del warnings") + + + def test_deprecation_warning(self): + ip.run_cell(""" +import warnings +def wrn(): + warnings.warn( + "I AM A WARNING", + DeprecationWarning + ) + """) + try: + with self.assertWarnsRegex(DeprecationWarning, "I AM A WARNING"): + ip.run_cell("wrn()") + finally: + ip.run_cell("del warnings") + ip.run_cell("del wrn") + + +class TestImportNoDeprecate(tt.TempFileMixin): + + def setUp(self): + """Make a valid python temp file.""" + self.mktmp(""" +import warnings +def wrn(): + warnings.warn( + "I AM A WARNING", + DeprecationWarning + ) +""") + super().setUp() + + def test_no_dep(self): + """ + No deprecation warning should be raised from imported functions + """ + ip.run_cell("from {} import wrn".format(self.fname)) + + with tt.AssertNotPrints("I AM A WARNING"): + ip.run_cell("wrn()") + ip.run_cell("del wrn") + + +def test_custom_exc_count(): + hook = mock.Mock(return_value=None) + ip.set_custom_exc((SyntaxError,), hook) + before = ip.execution_count + ip.run_cell("def foo()", store_history=True) + # restore default excepthook + ip.set_custom_exc((), None) + assert hook.call_count == 1 + assert ip.execution_count == before + 1 + + +def test_run_cell_async(): + ip.run_cell("import asyncio") + coro = ip.run_cell_async("await asyncio.sleep(0.01)\n5") + assert asyncio.iscoroutine(coro) + loop = asyncio.new_event_loop() + result = loop.run_until_complete(coro) + assert isinstance(result, interactiveshell.ExecutionResult) + assert result.result == 5 + + +def test_run_cell_await(): + ip.run_cell("import asyncio") + result = ip.run_cell("await asyncio.sleep(0.01); 10") + assert ip.user_ns["_"] == 10 + + +def test_run_cell_asyncio_run(): + ip.run_cell("import asyncio") + result = ip.run_cell("await asyncio.sleep(0.01); 1") + assert ip.user_ns["_"] == 1 + result = ip.run_cell("asyncio.run(asyncio.sleep(0.01)); 2") + assert ip.user_ns["_"] == 2 + result = ip.run_cell("await asyncio.sleep(0.01); 3") + assert ip.user_ns["_"] == 3 + + +def test_should_run_async(): + assert not ip.should_run_async("a = 5") + assert ip.should_run_async("await x") + assert ip.should_run_async("import asyncio; await asyncio.sleep(1)") + + +def test_set_custom_completer(): + num_completers = len(ip.Completer.matchers) + + def foo(*args, **kwargs): + return "I'm a completer!" + + ip.set_custom_completer(foo, 0) + + # check that we've really added a new completer + assert len(ip.Completer.matchers) == num_completers + 1 + + # check that the first completer is the function we defined + assert ip.Completer.matchers[0]() == "I'm a completer!" 
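+    # set_custom_completer stores the function in Completer.custom_matchers,
+    # which are consulted before the built-in matchers, hence index 0 above.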
+
+    # clean up
+    ip.Completer.custom_matchers.pop()
diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_iplib.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_iplib.py
new file mode 100644
index 000000000..c5e065005
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_iplib.py
@@ -0,0 +1,291 @@
+"""Tests for the key interactiveshell module, where the main ipython class is defined.
+"""
+
+import stack_data
+import sys
+
+SV_VERSION = tuple([int(x) for x in stack_data.__version__.split(".")[0:2]])
+
+
+def test_reset():
+    """reset must clear most namespaces."""
+
+    # Check that reset runs without error
+    ip.reset()
+
+    # Once we've reset it (to clear it of any junk that might have been there
+    # from other tests), we can count how many variables are in the user's
+    # namespace
+    nvars_user_ns = len(ip.user_ns)
+    nvars_hidden = len(ip.user_ns_hidden)
+
+    # Now add a few variables to user_ns, and check that reset clears them
+    ip.user_ns['x'] = 1
+    ip.user_ns['y'] = 1
+    ip.reset()
+
+    # Finally, check that all namespaces have only as many variables as we
+    # expect to find in them:
+    assert len(ip.user_ns) == nvars_user_ns
+    assert len(ip.user_ns_hidden) == nvars_hidden
+
+
+# Tests for reporting of exceptions in various modes, handling of SystemExit,
+# and %tb functionality. This is really a mix of testing ultraTB and interactiveshell.
+
+def doctest_tb_plain():
+    """
+    In [18]: xmode plain
+    Exception reporting mode: Plain
+
+    In [19]: run simpleerr.py
+    Traceback (most recent call last):
+      File ...:...
+        bar(mode)
+      File ...:... in bar
+        div0()
+      File ...:... in div0
+        x/y
+    ZeroDivisionError: ...
+    """
+
+
+def doctest_tb_context():
+    """
+    In [3]: xmode context
+    Exception reporting mode: Context
+
+    In [4]: run simpleerr.py
+    ---------------------------------------------------------------------------
+    ZeroDivisionError                         Traceback (most recent call last)
+
+    ...
+         30 except IndexError:
+         31     mode = 'div'
+    ---> 33 bar(mode)
+
+    ... in bar(mode)
+         15     "bar"
+         16     if mode=='div':
+    ---> 17         div0()
+         18     elif mode=='exit':
+         19         try:
+
+    ... in div0()
+          6     x = 1
+          7     y = 0
+    ----> 8     x/y
+
+    ZeroDivisionError: ..."""
+
+
+def doctest_tb_verbose():
+    """
+    In [5]: xmode verbose
+    Exception reporting mode: Verbose
+
+    In [6]: run simpleerr.py
+    ---------------------------------------------------------------------------
+    ZeroDivisionError                         Traceback (most recent call last)
+
+    ...
+         30 except IndexError:
+         31     mode = 'div'
+    ---> 33 bar(mode)
+        mode = 'div'
+
+    ... in bar(mode='div')
+         15     "bar"
+         16     if mode=='div':
+    ---> 17         div0()
+         18     elif mode=='exit':
+         19         try:
+
+    ... in div0()
+          6     x = 1
+          7     y = 0
+    ----> 8     x/y
+        x = 1
+        y = 0
+
+    ZeroDivisionError: ...
+    """
+
+
+def doctest_tb_sysexit():
+    """
+    In [17]: %xmode plain
+    Exception reporting mode: Plain
+
+    In [18]: %run simpleerr.py exit
+    An exception has occurred, use %tb to see the full traceback.
+    SystemExit: (1, 'Mode = exit')
+
+    In [19]: %run simpleerr.py exit 2
+    An exception has occurred, use %tb to see the full traceback.
+    SystemExit: (2, 'Mode = exit')
+
+    In [20]: %tb
+    Traceback (most recent call last):
+      File ...:... in execfile
+        exec(compiler(f.read(), fname, "exec"), glob, loc)
+      File ...:...
+        bar(mode)
+      File ...:... in bar
+        sysexit(stat, mode)
+      File ...:... 
in sysexit + raise SystemExit(stat, f"Mode = {mode}") + SystemExit: (2, 'Mode = exit') + + In [21]: %xmode context + Exception reporting mode: Context + + In [22]: %tb + --------------------------------------------------------------------------- + SystemExit Traceback (most recent call last) + File ..., in execfile(fname, glob, loc, compiler) + ... with open(fname, "rb") as f: + ... compiler = compiler or compile + ---> ... exec(compiler(f.read(), fname, "exec"), glob, loc) + ... + 30 except IndexError: + 31 mode = 'div' + ---> 33 bar(mode) + + ...bar(mode) + 21 except: + 22 stat = 1 + ---> 23 sysexit(stat, mode) + 24 else: + 25 raise ValueError('Unknown mode') + + ...sysexit(stat, mode) + 10 def sysexit(stat, mode): + ---> 11 raise SystemExit(stat, f"Mode = {mode}") + + SystemExit: (2, 'Mode = exit') + """ + + +if sys.version_info >= (3, 9): + if SV_VERSION < (0, 6): + + def doctest_tb_sysexit_verbose_stack_data_05(): + """ + In [18]: %run simpleerr.py exit + An exception has occurred, use %tb to see the full traceback. + SystemExit: (1, 'Mode = exit') + + In [19]: %run simpleerr.py exit 2 + An exception has occurred, use %tb to see the full traceback. + SystemExit: (2, 'Mode = exit') + + In [23]: %xmode verbose + Exception reporting mode: Verbose + + In [24]: %tb + --------------------------------------------------------------------------- + SystemExit Traceback (most recent call last) + + ... + 30 except IndexError: + 31 mode = 'div' + ---> 33 bar(mode) + mode = 'exit' + + ... in bar(mode='exit') + ... except: + ... stat = 1 + ---> ... sysexit(stat, mode) + mode = 'exit' + stat = 2 + ... else: + ... raise ValueError('Unknown mode') + + ... in sysexit(stat=2, mode='exit') + 10 def sysexit(stat, mode): + ---> 11 raise SystemExit(stat, f"Mode = {mode}") + stat = 2 + + SystemExit: (2, 'Mode = exit') + """ + + else: + # currently the only difference is + # + mode = 'exit' + + def doctest_tb_sysexit_verbose_stack_data_06(): + """ + In [18]: %run simpleerr.py exit + An exception has occurred, use %tb to see the full traceback. + SystemExit: (1, 'Mode = exit') + + In [19]: %run simpleerr.py exit 2 + An exception has occurred, use %tb to see the full traceback. + SystemExit: (2, 'Mode = exit') + + In [23]: %xmode verbose + Exception reporting mode: Verbose + + In [24]: %tb + --------------------------------------------------------------------------- + SystemExit Traceback (most recent call last) + + ... + 30 except IndexError: + 31 mode = 'div' + ---> 33 bar(mode) + mode = 'exit' + + ... in bar(mode='exit') + ... except: + ... stat = 1 + ---> ... sysexit(stat, mode) + mode = 'exit' + stat = 2 + ... else: + ... raise ValueError('Unknown mode') + + ... 
in sysexit(stat=2, mode='exit') + 10 def sysexit(stat, mode): + ---> 11 raise SystemExit(stat, f"Mode = {mode}") + stat = 2 + mode = 'exit' + + SystemExit: (2, 'Mode = exit') + """ + +def test_run_cell(): + import textwrap + + ip.run_cell("a = 10\na+=1") + ip.run_cell("assert a == 11\nassert 1") + + assert ip.user_ns["a"] == 11 + complex = textwrap.dedent( + """ + if 1: + print "hello" + if 1: + print "world" + + if 2: + print "foo" + + if 3: + print "bar" + + if 4: + print "bar" + + """ + ) + # Simply verifies that this kind of input is run + ip.run_cell(complex) + + +def test_db(): + """Test the internal database used for variable persistence.""" + ip.db["__unittest_"] = 12 + assert ip.db["__unittest_"] == 12 + del ip.db["__unittest_"] + assert "__unittest_" not in ip.db diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_logger.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_logger.py new file mode 100644 index 000000000..10e462098 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_logger.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +"""Test IPython.core.logger""" + +import os.path + +import pytest +from tempfile import TemporaryDirectory + + +def test_logstart_inaccessible_file(): + with pytest.raises(IOError): + _ip.logger.logstart(logfname="/") # Opening that filename will fail. + + try: + _ip.run_cell("a=1") # Check it doesn't try to log this + finally: + _ip.logger.log_active = False # If this fails, don't let later tests fail + +def test_logstart_unicode(): + with TemporaryDirectory() as tdir: + logfname = os.path.join(tdir, "test_unicode.log") + _ip.run_cell("'abc€'") + try: + _ip.magic("logstart -to %s" % logfname) + _ip.run_cell("'abc€'") + finally: + _ip.logger.logstop() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic.py new file mode 100644 index 000000000..509dd66dd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic.py @@ -0,0 +1,1454 @@ +# -*- coding: utf-8 -*- +"""Tests for various magic functions.""" + +import gc +import io +import os +import re +import shlex +import sys +import warnings +from importlib import invalidate_caches +from io import StringIO +from pathlib import Path +from textwrap import dedent +from unittest import TestCase, mock + +import pytest + +from IPython import get_ipython +from IPython.core import magic +from IPython.core.error import UsageError +from IPython.core.magic import ( + Magics, + cell_magic, + line_magic, + magics_class, + register_cell_magic, + register_line_magic, +) +from IPython.core.magics import code, execution, logging, osm, script +from IPython.testing import decorators as dec +from IPython.testing import tools as tt +from IPython.utils.io import capture_output +from IPython.utils.process import find_cmd +from IPython.utils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory +from IPython.utils.syspathcontext import prepended_to_syspath + +from .test_debugger import PdbTestInput + +from tempfile import NamedTemporaryFile + +@magic.magics_class +class DummyMagics(magic.Magics): pass + +def test_extract_code_ranges(): + instr = "1 3 5-6 7-9 10:15 17: :10 10- -13 :" + expected = [ + (0, 1), + (2, 3), + (4, 6), + (6, 9), + (9, 14), + (16, None), + (None, 9), + (9, None), + (None, 13), + (None, None), + ] + actual = list(code.extract_code_ranges(instr)) + assert actual == expected + +def test_extract_symbols(): + source = """import 
foo\na = 10\ndef b():\n return 42\n\n\nclass A: pass\n\n\n""" + symbols_args = ["a", "b", "A", "A,b", "A,a", "z"] + expected = [([], ['a']), + (["def b():\n return 42\n"], []), + (["class A: pass\n"], []), + (["class A: pass\n", "def b():\n return 42\n"], []), + (["class A: pass\n"], ['a']), + ([], ['z'])] + for symbols, exp in zip(symbols_args, expected): + assert code.extract_symbols(source, symbols) == exp + + +def test_extract_symbols_raises_exception_with_non_python_code(): + source = ("=begin A Ruby program :)=end\n" + "def hello\n" + "puts 'Hello world'\n" + "end") + with pytest.raises(SyntaxError): + code.extract_symbols(source, "hello") + + +def test_magic_not_found(): + # magic not found raises UsageError + with pytest.raises(UsageError): + _ip.run_line_magic("doesntexist", "") + + # ensure result isn't success when a magic isn't found + result = _ip.run_cell('%doesntexist') + assert isinstance(result.error_in_exec, UsageError) + + +def test_cell_magic_not_found(): + # magic not found raises UsageError + with pytest.raises(UsageError): + _ip.run_cell_magic('doesntexist', 'line', 'cell') + + # ensure result isn't success when a magic isn't found + result = _ip.run_cell('%%doesntexist') + assert isinstance(result.error_in_exec, UsageError) + + +def test_magic_error_status(): + def fail(shell): + 1/0 + _ip.register_magic_function(fail) + result = _ip.run_cell('%fail') + assert isinstance(result.error_in_exec, ZeroDivisionError) + + +def test_config(): + """ test that config magic does not raise + can happen if Configurable init is moved too early into + Magics.__init__ as then a Config object will be registered as a + magic. + """ + ## should not raise. + _ip.run_line_magic("config", "") + + +def test_config_available_configs(): + """ test that config magic prints available configs in unique and + sorted order. """ + with capture_output() as captured: + _ip.run_line_magic("config", "") + + stdout = captured.stdout + config_classes = stdout.strip().split('\n')[1:] + assert config_classes == sorted(set(config_classes)) + +def test_config_print_class(): + """ test that config with a classname prints the class's options. """ + with capture_output() as captured: + _ip.run_line_magic("config", "TerminalInteractiveShell") + + stdout = captured.stdout + assert re.match( + "TerminalInteractiveShell.* options", stdout.splitlines()[0] + ), f"{stdout}\n\n1st line of stdout not like 'TerminalInteractiveShell.* options'" + + +def test_rehashx(): + # clear up everything + _ip.alias_manager.clear_aliases() + del _ip.db['syscmdlist'] + + _ip.run_line_magic("rehashx", "") + # Practically ALL ipython development systems will have more than 10 aliases + + assert len(_ip.alias_manager.aliases) > 10 + for name, cmd in _ip.alias_manager.aliases: + # we must strip dots from alias names + assert "." 
not in name + + # rehashx must fill up syscmdlist + scoms = _ip.db['syscmdlist'] + assert len(scoms) > 10 + + +def test_magic_parse_options(): + """Test that we don't mangle paths when parsing magic options.""" + ip = get_ipython() + path = 'c:\\x' + m = DummyMagics(ip) + opts = m.parse_options('-f %s' % path,'f:')[0] + # argv splitting is os-dependent + if os.name == 'posix': + expected = 'c:x' + else: + expected = path + assert opts["f"] == expected + + +def test_magic_parse_long_options(): + """Magic.parse_options can handle --foo=bar long options""" + ip = get_ipython() + m = DummyMagics(ip) + opts, _ = m.parse_options("--foo --bar=bubble", "a", "foo", "bar=") + assert "foo" in opts + assert "bar" in opts + assert opts["bar"] == "bubble" + + +def doctest_hist_f(): + """Test %hist -f with temporary filename. + + In [9]: import tempfile + + In [10]: tfile = tempfile.mktemp('.py','tmp-ipython-') + + In [11]: %hist -nl -f $tfile 3 + + In [13]: import os; os.unlink(tfile) + """ + + +def doctest_hist_op(): + """Test %hist -op + + In [1]: class b(float): + ...: pass + ...: + + In [2]: class s(object): + ...: def __str__(self): + ...: return 's' + ...: + + In [3]: + + In [4]: class r(b): + ...: def __repr__(self): + ...: return 'r' + ...: + + In [5]: class sr(s,r): pass + ...: + + In [6]: + + In [7]: bb=b() + + In [8]: ss=s() + + In [9]: rr=r() + + In [10]: ssrr=sr() + + In [11]: 4.5 + Out[11]: 4.5 + + In [12]: str(ss) + Out[12]: 's' + + In [13]: + + In [14]: %hist -op + >>> class b: + ... pass + ... + >>> class s(b): + ... def __str__(self): + ... return 's' + ... + >>> + >>> class r(b): + ... def __repr__(self): + ... return 'r' + ... + >>> class sr(s,r): pass + >>> + >>> bb=b() + >>> ss=s() + >>> rr=r() + >>> ssrr=sr() + >>> 4.5 + 4.5 + >>> str(ss) + 's' + >>> + """ + +def test_hist_pof(): + ip = get_ipython() + ip.run_cell("1+2", store_history=True) + #raise Exception(ip.history_manager.session_number) + #raise Exception(list(ip.history_manager._get_range_session())) + with TemporaryDirectory() as td: + tf = os.path.join(td, 'hist.py') + ip.run_line_magic('history', '-pof %s' % tf) + assert os.path.isfile(tf) + + +def test_macro(): + ip = get_ipython() + ip.history_manager.reset() # Clear any existing history. 
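+    # Store three inputs directly in the history, then check that
+    # "%macro test 1-3" captures exactly that range, joined by newlines.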
+ cmds = ["a=1", "def b():\n return a**2", "print(a,b())"] + for i, cmd in enumerate(cmds, start=1): + ip.history_manager.store_inputs(i, cmd) + ip.run_line_magic("macro", "test 1-3") + assert ip.user_ns["test"].value == "\n".join(cmds) + "\n" + + # List macros + assert "test" in ip.run_line_magic("macro", "") + + +def test_macro_run(): + """Test that we can run a multi-line macro successfully.""" + ip = get_ipython() + ip.history_manager.reset() + cmds = ["a=10", "a+=1", "print(a)", "%macro test 2-3"] + for cmd in cmds: + ip.run_cell(cmd, store_history=True) + assert ip.user_ns["test"].value == "a+=1\nprint(a)\n" + with tt.AssertPrints("12"): + ip.run_cell("test") + with tt.AssertPrints("13"): + ip.run_cell("test") + + +def test_magic_magic(): + """Test %magic""" + ip = get_ipython() + with capture_output() as captured: + ip.run_line_magic("magic", "") + + stdout = captured.stdout + assert "%magic" in stdout + assert "IPython" in stdout + assert "Available" in stdout + + +@dec.skipif_not_numpy +def test_numpy_reset_array_undec(): + "Test '%reset array' functionality" + _ip.ex("import numpy as np") + _ip.ex("a = np.empty(2)") + assert "a" in _ip.user_ns + _ip.run_line_magic("reset", "-f array") + assert "a" not in _ip.user_ns + + +def test_reset_out(): + "Test '%reset out' magic" + _ip.run_cell("parrot = 'dead'", store_history=True) + # test '%reset -f out', make an Out prompt + _ip.run_cell("parrot", store_history=True) + assert "dead" in [_ip.user_ns[x] for x in ("_", "__", "___")] + _ip.run_line_magic("reset", "-f out") + assert "dead" not in [_ip.user_ns[x] for x in ("_", "__", "___")] + assert len(_ip.user_ns["Out"]) == 0 + + +def test_reset_in(): + "Test '%reset in' magic" + # test '%reset -f in' + _ip.run_cell("parrot", store_history=True) + assert "parrot" in [_ip.user_ns[x] for x in ("_i", "_ii", "_iii")] + _ip.run_line_magic("reset", "-f in") + assert "parrot" not in [_ip.user_ns[x] for x in ("_i", "_ii", "_iii")] + assert len(set(_ip.user_ns["In"])) == 1 + + +def test_reset_dhist(): + "Test '%reset dhist' magic" + _ip.run_cell("tmp = [d for d in _dh]") # copy before clearing + _ip.run_line_magic("cd", os.path.dirname(pytest.__file__)) + _ip.run_line_magic("cd", "-") + assert len(_ip.user_ns["_dh"]) > 0 + _ip.run_line_magic("reset", "-f dhist") + assert len(_ip.user_ns["_dh"]) == 0 + _ip.run_cell("_dh = [d for d in tmp]") # restore + + +def test_reset_in_length(): + "Test that '%reset in' preserves In[] length" + _ip.run_cell("print 'foo'") + _ip.run_cell("reset -f in") + assert len(_ip.user_ns["In"]) == _ip.displayhook.prompt_count + 1 + + +class TestResetErrors(TestCase): + + def test_reset_redefine(self): + + @magics_class + class KernelMagics(Magics): + @line_magic + def less(self, shell): pass + + _ip.register_magics(KernelMagics) + + with self.assertLogs() as cm: + # hack, we want to just capture logs, but assertLogs fails if not + # logs get produce. + # so log one things we ignore. + import logging as log_mod + log = log_mod.getLogger() + log.info('Nothing') + # end hack. 
+ _ip.run_cell("reset -f") + + assert len(cm.output) == 1 + for out in cm.output: + assert "Invalid alias" not in out + +def test_tb_syntaxerror(): + """test %tb after a SyntaxError""" + ip = get_ipython() + ip.run_cell("for") + + # trap and validate stdout + save_stdout = sys.stdout + try: + sys.stdout = StringIO() + ip.run_cell("%tb") + out = sys.stdout.getvalue() + finally: + sys.stdout = save_stdout + # trim output, and only check the last line + last_line = out.rstrip().splitlines()[-1].strip() + assert last_line == "SyntaxError: invalid syntax" + + +def test_time(): + ip = get_ipython() + + with tt.AssertPrints("Wall time: "): + ip.run_cell("%time None") + + ip.run_cell("def f(kmjy):\n" + " %time print (2*kmjy)") + + with tt.AssertPrints("Wall time: "): + with tt.AssertPrints("hihi", suppress=False): + ip.run_cell("f('hi')") + +def test_time_last_not_expression(): + ip.run_cell("%%time\n" + "var_1 = 1\n" + "var_2 = 2\n") + assert ip.user_ns['var_1'] == 1 + del ip.user_ns['var_1'] + assert ip.user_ns['var_2'] == 2 + del ip.user_ns['var_2'] + + +@dec.skip_win32 +def test_time2(): + ip = get_ipython() + + with tt.AssertPrints("CPU times: user "): + ip.run_cell("%time None") + +def test_time3(): + """Erroneous magic function calls, issue gh-3334""" + ip = get_ipython() + ip.user_ns.pop('run', None) + + with tt.AssertNotPrints("not found", channel='stderr'): + ip.run_cell("%%time\n" + "run = 0\n" + "run += 1") + +def test_multiline_time(): + """Make sure last statement from time return a value.""" + ip = get_ipython() + ip.user_ns.pop('run', None) + + ip.run_cell( + dedent( + """\ + %%time + a = "ho" + b = "hey" + a+b + """ + ) + ) + assert ip.user_ns_hidden["_"] == "hohey" + + +def test_time_local_ns(): + """ + Test that local_ns is actually global_ns when running a cell magic + """ + ip = get_ipython() + ip.run_cell("%%time\n" "myvar = 1") + assert ip.user_ns["myvar"] == 1 + del ip.user_ns["myvar"] + + +def test_doctest_mode(): + "Toggle doctest_mode twice, it should be a no-op and run without error" + _ip.run_line_magic("doctest_mode", "") + _ip.run_line_magic("doctest_mode", "") + + +def test_parse_options(): + """Tests for basic options parsing in magics.""" + # These are only the most minimal of tests, more should be added later. At + # the very least we check that basic text/unicode calls work OK. 
+ m = DummyMagics(_ip) + assert m.parse_options("foo", "")[1] == "foo" + assert m.parse_options("foo", "")[1] == "foo" + + +def test_parse_options_preserve_non_option_string(): + """Test to assert preservation of non-option part of magic-block, while parsing magic options.""" + m = DummyMagics(_ip) + opts, stmt = m.parse_options( + " -n1 -r 13 _ = 314 + foo", "n:r:", preserve_non_opts=True + ) + assert opts == {"n": "1", "r": "13"} + assert stmt == "_ = 314 + foo" + + +def test_run_magic_preserve_code_block(): + """Test to assert preservation of non-option part of magic-block, while running magic.""" + _ip.user_ns["spaces"] = [] + _ip.run_line_magic( + "timeit", "-n1 -r1 spaces.append([s.count(' ') for s in ['document']])" + ) + assert _ip.user_ns["spaces"] == [[0]] + + +def test_dirops(): + """Test various directory handling operations.""" + # curpath = lambda :os.path.splitdrive(os.getcwd())[1].replace('\\','/') + curpath = os.getcwd + startdir = os.getcwd() + ipdir = os.path.realpath(_ip.ipython_dir) + try: + _ip.run_line_magic("cd", '"%s"' % ipdir) + assert curpath() == ipdir + _ip.run_line_magic("cd", "-") + assert curpath() == startdir + _ip.run_line_magic("pushd", '"%s"' % ipdir) + assert curpath() == ipdir + _ip.run_line_magic("popd", "") + assert curpath() == startdir + finally: + os.chdir(startdir) + + +def test_cd_force_quiet(): + """Test OSMagics.cd_force_quiet option""" + _ip.config.OSMagics.cd_force_quiet = True + osmagics = osm.OSMagics(shell=_ip) + + startdir = os.getcwd() + ipdir = os.path.realpath(_ip.ipython_dir) + + try: + with tt.AssertNotPrints(ipdir): + osmagics.cd('"%s"' % ipdir) + with tt.AssertNotPrints(startdir): + osmagics.cd('-') + finally: + os.chdir(startdir) + + +def test_xmode(): + # Calling xmode three times should be a no-op + xmode = _ip.InteractiveTB.mode + for i in range(4): + _ip.run_line_magic("xmode", "") + assert _ip.InteractiveTB.mode == xmode + +def test_reset_hard(): + monitor = [] + class A(object): + def __del__(self): + monitor.append(1) + def __repr__(self): + return "" + + _ip.user_ns["a"] = A() + _ip.run_cell("a") + + assert monitor == [] + _ip.run_line_magic("reset", "-f") + assert monitor == [1] + +class TestXdel(tt.TempFileMixin): + def test_xdel(self): + """Test that references from %run are cleared by xdel.""" + src = ("class A(object):\n" + " monitor = []\n" + " def __del__(self):\n" + " self.monitor.append(1)\n" + "a = A()\n") + self.mktmp(src) + # %run creates some hidden references... + _ip.run_line_magic("run", "%s" % self.fname) + # ... as does the displayhook. + _ip.run_cell("a") + + monitor = _ip.user_ns["A"].monitor + assert monitor == [] + + _ip.run_line_magic("xdel", "a") + + # Check that a's __del__ method has been called. 
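+        # A generation-0 collection pass makes the deletion deterministic before checking.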
+ gc.collect(0) + assert monitor == [1] + +def doctest_who(): + """doctest for %who + + In [1]: %reset -sf + + In [2]: alpha = 123 + + In [3]: beta = 'beta' + + In [4]: %who int + alpha + + In [5]: %who str + beta + + In [6]: %whos + Variable Type Data/Info + ---------------------------- + alpha int 123 + beta str beta + + In [7]: %who_ls + Out[7]: ['alpha', 'beta'] + """ + +def test_whos(): + """Check that whos is protected against objects where repr() fails.""" + class A(object): + def __repr__(self): + raise Exception() + _ip.user_ns['a'] = A() + _ip.run_line_magic("whos", "") + +def doctest_precision(): + """doctest for %precision + + In [1]: f = get_ipython().display_formatter.formatters['text/plain'] + + In [2]: %precision 5 + Out[2]: '%.5f' + + In [3]: f.float_format + Out[3]: '%.5f' + + In [4]: %precision %e + Out[4]: '%e' + + In [5]: f(3.1415927) + Out[5]: '3.141593e+00' + """ + +def test_debug_magic(): + """Test debugging a small code with %debug + + In [1]: with PdbTestInput(['c']): + ...: %debug print("a b") #doctest: +ELLIPSIS + ...: + ... + ipdb> c + a b + In [2]: + """ + +def test_psearch(): + with tt.AssertPrints("dict.fromkeys"): + _ip.run_cell("dict.fr*?") + with tt.AssertPrints("π.is_integer"): + _ip.run_cell("π = 3.14;\nπ.is_integ*?") + +def test_timeit_shlex(): + """test shlex issues with timeit (#1109)""" + _ip.ex("def f(*a,**kw): pass") + _ip.run_line_magic("timeit", '-n1 "this is a bug".count(" ")') + _ip.run_line_magic("timeit", '-r1 -n1 f(" ", 1)') + _ip.run_line_magic("timeit", '-r1 -n1 f(" ", 1, " ", 2, " ")') + _ip.run_line_magic("timeit", '-r1 -n1 ("a " + "b")') + _ip.run_line_magic("timeit", '-r1 -n1 f("a " + "b")') + _ip.run_line_magic("timeit", '-r1 -n1 f("a " + "b ")') + + +def test_timeit_special_syntax(): + "Test %%timeit with IPython special syntax" + @register_line_magic + def lmagic(line): + ip = get_ipython() + ip.user_ns['lmagic_out'] = line + + # line mode test + _ip.run_line_magic("timeit", "-n1 -r1 %lmagic my line") + assert _ip.user_ns["lmagic_out"] == "my line" + # cell mode test + _ip.run_cell_magic("timeit", "-n1 -r1", "%lmagic my line2") + assert _ip.user_ns["lmagic_out"] == "my line2" + + +def test_timeit_return(): + """ + test whether timeit -o return object + """ + + res = _ip.run_line_magic('timeit','-n10 -r10 -o 1') + assert(res is not None) + +def test_timeit_quiet(): + """ + test quiet option of timeit magic + """ + with tt.AssertNotPrints("loops"): + _ip.run_cell("%timeit -n1 -r1 -q 1") + +def test_timeit_return_quiet(): + with tt.AssertNotPrints("loops"): + res = _ip.run_line_magic('timeit', '-n1 -r1 -q -o 1') + assert (res is not None) + +def test_timeit_invalid_return(): + with pytest.raises(SyntaxError): + _ip.run_line_magic('timeit', 'return') + +@dec.skipif(execution.profile is None) +def test_prun_special_syntax(): + "Test %%prun with IPython special syntax" + @register_line_magic + def lmagic(line): + ip = get_ipython() + ip.user_ns['lmagic_out'] = line + + # line mode test + _ip.run_line_magic("prun", "-q %lmagic my line") + assert _ip.user_ns["lmagic_out"] == "my line" + # cell mode test + _ip.run_cell_magic("prun", "-q", "%lmagic my line2") + assert _ip.user_ns["lmagic_out"] == "my line2" + + +@dec.skipif(execution.profile is None) +def test_prun_quotes(): + "Test that prun does not clobber string escapes (GH #1302)" + _ip.magic(r"prun -q x = '\t'") + assert _ip.user_ns["x"] == "\t" + + +def test_extension(): + # Debugging information for failures of this test + print('sys.path:') + for p in sys.path: + print(' ', p) + 
print('CWD', os.getcwd())
+
+    pytest.raises(ImportError, _ip.magic, "load_ext daft_extension")
+    daft_path = os.path.join(os.path.dirname(__file__), "daft_extension")
+    sys.path.insert(0, daft_path)
+    try:
+        _ip.user_ns.pop('arq', None)
+        invalidate_caches() # Clear import caches
+        _ip.run_line_magic("load_ext", "daft_extension")
+        assert _ip.user_ns["arq"] == 185
+        _ip.run_line_magic("unload_ext", "daft_extension")
+        assert 'arq' not in _ip.user_ns
+    finally:
+        sys.path.remove(daft_path)
+
+
+def test_notebook_export_json():
+    pytest.importorskip("nbformat")
+    _ip = get_ipython()
+    _ip.history_manager.reset() # Clear any existing history.
+    cmds = ["a=1", "def b():\n return a**2", "print('noël, été', b())"]
+    for i, cmd in enumerate(cmds, start=1):
+        _ip.history_manager.store_inputs(i, cmd)
+    with TemporaryDirectory() as td:
+        outfile = os.path.join(td, "nb.ipynb")
+        _ip.run_line_magic("notebook", "%s" % outfile)
+
+
+class TestEnv(TestCase):
+
+    def test_env(self):
+        env = _ip.run_line_magic("env", "")
+        self.assertTrue(isinstance(env, dict))
+
+    def test_env_secret(self):
+        env = _ip.run_line_magic("env", "")
+        hidden = "<hidden>"
+        with mock.patch.dict(
+            os.environ,
+            {
+                "API_KEY": "abc123",
+                "SECRET_THING": "ssshhh",
+                "JUPYTER_TOKEN": "",
+                "VAR": "abc"
+            }
+        ):
+            env = _ip.run_line_magic("env", "")
+        assert env["API_KEY"] == hidden
+        assert env["SECRET_THING"] == hidden
+        assert env["JUPYTER_TOKEN"] == hidden
+        assert env["VAR"] == "abc"
+
+    def test_env_get_set_simple(self):
+        env = _ip.run_line_magic("env", "var val1")
+        self.assertEqual(env, None)
+        self.assertEqual(os.environ["var"], "val1")
+        self.assertEqual(_ip.run_line_magic("env", "var"), "val1")
+        env = _ip.run_line_magic("env", "var=val2")
+        self.assertEqual(env, None)
+        self.assertEqual(os.environ['var'], 'val2')
+
+    def test_env_get_set_complex(self):
+        env = _ip.run_line_magic("env", "var 'val1 '' 'val2")
+        self.assertEqual(env, None)
+        self.assertEqual(os.environ['var'], "'val1 '' 'val2")
+        self.assertEqual(_ip.run_line_magic("env", "var"), "'val1 '' 'val2")
+        env = _ip.run_line_magic("env", 'var=val2 val3="val4')
+        self.assertEqual(env, None)
+        self.assertEqual(os.environ['var'], 'val2 val3="val4')
+
+    def test_env_set_bad_input(self):
+        self.assertRaises(UsageError, lambda: _ip.run_line_magic("set_env", "var"))
+
+    def test_env_set_whitespace(self):
+        self.assertRaises(UsageError, lambda: _ip.run_line_magic("env", "var A=B"))
+
+
+class CellMagicTestCase(TestCase):
+
+    def check_ident(self, magic):
+        # Manually called, we get the result
+        out = _ip.run_cell_magic(magic, "a", "b")
+        assert out == ("a", "b")
+        # Via run_cell, it goes into the user's namespace via displayhook
+        _ip.run_cell("%%" + magic + " c\nd\n")
+        assert _ip.user_ns["_"] == ("c", "d\n")
+
+    def test_cell_magic_func_deco(self):
+        "Cell magic using simple decorator"
+        @register_cell_magic
+        def cellm(line, cell):
+            return line, cell
+
+        self.check_ident('cellm')
+
+    def test_cell_magic_reg(self):
+        "Cell magic manually registered"
+        def cellm(line, cell):
+            return line, cell
+
+        _ip.register_magic_function(cellm, 'cell', 'cellm2')
+        self.check_ident('cellm2')
+
+    def test_cell_magic_class(self):
+        "Cell magics declared via a class"
+        @magics_class
+        class MyMagics(Magics):
+
+            @cell_magic
+            def cellm3(self, line, cell):
+                return line, cell
+
+        _ip.register_magics(MyMagics)
+        self.check_ident('cellm3')
+
+    def test_cell_magic_class2(self):
+        "Cell magics declared via a class, #2"
+        @magics_class
+        class MyMagics2(Magics):
+
+            @cell_magic('cellm4')
+            def 
cellm33(self, line, cell): + return line, cell + + _ip.register_magics(MyMagics2) + self.check_ident('cellm4') + # Check that nothing is registered as 'cellm33' + c33 = _ip.find_cell_magic('cellm33') + assert c33 == None + +def test_file(): + """Basic %%writefile""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, "file1") + ip.run_cell_magic( + "writefile", + fname, + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line2" in s + + +@dec.skip_win32 +def test_file_single_quote(): + """Basic %%writefile with embedded single quotes""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, "'file1'") + ip.run_cell_magic( + "writefile", + fname, + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line2" in s + + +@dec.skip_win32 +def test_file_double_quote(): + """Basic %%writefile with embedded double quotes""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, '"file1"') + ip.run_cell_magic( + "writefile", + fname, + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line2" in s + + +def test_file_var_expand(): + """%%writefile $filename""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, "file1") + ip.user_ns["filename"] = fname + ip.run_cell_magic( + "writefile", + "$filename", + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line2" in s + + +def test_file_unicode(): + """%%writefile with unicode cell""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, 'file1') + ip.run_cell_magic("writefile", fname, u'\n'.join([ + u'liné1', + u'liné2', + ])) + with io.open(fname, encoding='utf-8') as f: + s = f.read() + assert "liné1\n" in s + assert "liné2" in s + + +def test_file_amend(): + """%%writefile -a amends files""" + ip = get_ipython() + with TemporaryDirectory() as td: + fname = os.path.join(td, "file2") + ip.run_cell_magic( + "writefile", + fname, + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + ip.run_cell_magic( + "writefile", + "-a %s" % fname, + "\n".join( + [ + "line3", + "line4", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line3\n" in s + + +def test_file_spaces(): + """%%file with spaces in filename""" + ip = get_ipython() + with TemporaryWorkingDirectory() as td: + fname = "file name" + ip.run_cell_magic( + "file", + '"%s"' % fname, + "\n".join( + [ + "line1", + "line2", + ] + ), + ) + s = Path(fname).read_text(encoding="utf-8") + assert "line1\n" in s + assert "line2" in s + + +def test_script_config(): + ip = get_ipython() + ip.config.ScriptMagics.script_magics = ['whoda'] + sm = script.ScriptMagics(shell=ip) + assert "whoda" in sm.magics["cell"] + + +def test_script_out(): + ip = get_ipython() + ip.run_cell_magic("script", f"--out output {sys.executable}", "print('hi')") + assert ip.user_ns["output"].strip() == "hi" + + +def test_script_err(): + ip = get_ipython() + ip.run_cell_magic( + "script", + f"--err error {sys.executable}", + "import sys; print('hello', file=sys.stderr)", + ) + assert ip.user_ns["error"].strip() == "hello" + + +def test_script_out_err(): + + ip = get_ipython() + ip.run_cell_magic( + "script", + f"--out output --err error 
{sys.executable}", + "\n".join( + [ + "import sys", + "print('hi')", + "print('hello', file=sys.stderr)", + ] + ), + ) + assert ip.user_ns["output"].strip() == "hi" + assert ip.user_ns["error"].strip() == "hello" + + +async def test_script_bg_out(): + ip = get_ipython() + ip.run_cell_magic("script", f"--bg --out output {sys.executable}", "print('hi')") + assert (await ip.user_ns["output"].read()).strip() == b"hi" + assert ip.user_ns["output"].at_eof() + + +async def test_script_bg_err(): + ip = get_ipython() + ip.run_cell_magic( + "script", + f"--bg --err error {sys.executable}", + "import sys; print('hello', file=sys.stderr)", + ) + assert (await ip.user_ns["error"].read()).strip() == b"hello" + assert ip.user_ns["error"].at_eof() + + +async def test_script_bg_out_err(): + ip = get_ipython() + ip.run_cell_magic( + "script", + f"--bg --out output --err error {sys.executable}", + "\n".join( + [ + "import sys", + "print('hi')", + "print('hello', file=sys.stderr)", + ] + ), + ) + assert (await ip.user_ns["output"].read()).strip() == b"hi" + assert (await ip.user_ns["error"].read()).strip() == b"hello" + assert ip.user_ns["output"].at_eof() + assert ip.user_ns["error"].at_eof() + + +async def test_script_bg_proc(): + ip = get_ipython() + ip.run_cell_magic( + "script", + f"--bg --out output --proc p {sys.executable}", + "\n".join( + [ + "import sys", + "print('hi')", + "print('hello', file=sys.stderr)", + ] + ), + ) + p = ip.user_ns["p"] + await p.wait() + assert p.returncode == 0 + assert (await p.stdout.read()).strip() == b"hi" + # not captured, so empty + assert (await p.stderr.read()) == b"" + assert p.stdout.at_eof() + assert p.stderr.at_eof() + + +def test_script_defaults(): + ip = get_ipython() + for cmd in ['sh', 'bash', 'perl', 'ruby']: + try: + find_cmd(cmd) + except Exception: + pass + else: + assert cmd in ip.magics_manager.magics["cell"] + + +@magics_class +class FooFoo(Magics): + """class with both %foo and %%foo magics""" + @line_magic('foo') + def line_foo(self, line): + "I am line foo" + pass + + @cell_magic("foo") + def cell_foo(self, line, cell): + "I am cell foo, not line foo" + pass + +def test_line_cell_info(): + """%%foo and %foo magics are distinguishable to inspect""" + ip = get_ipython() + ip.magics_manager.register(FooFoo) + oinfo = ip.object_inspect("foo") + assert oinfo["found"] is True + assert oinfo["ismagic"] is True + + oinfo = ip.object_inspect("%%foo") + assert oinfo["found"] is True + assert oinfo["ismagic"] is True + assert oinfo["docstring"] == FooFoo.cell_foo.__doc__ + + oinfo = ip.object_inspect("%foo") + assert oinfo["found"] is True + assert oinfo["ismagic"] is True + assert oinfo["docstring"] == FooFoo.line_foo.__doc__ + + +def test_multiple_magics(): + ip = get_ipython() + foo1 = FooFoo(ip) + foo2 = FooFoo(ip) + mm = ip.magics_manager + mm.register(foo1) + assert mm.magics["line"]["foo"].__self__ is foo1 + mm.register(foo2) + assert mm.magics["line"]["foo"].__self__ is foo2 + + +def test_alias_magic(): + """Test %alias_magic.""" + ip = get_ipython() + mm = ip.magics_manager + + # Basic operation: both cell and line magics are created, if possible. + ip.run_line_magic("alias_magic", "timeit_alias timeit") + assert "timeit_alias" in mm.magics["line"] + assert "timeit_alias" in mm.magics["cell"] + + # --cell is specified, line magic not created. 
+ ip.run_line_magic("alias_magic", "--cell timeit_cell_alias timeit") + assert "timeit_cell_alias" not in mm.magics["line"] + assert "timeit_cell_alias" in mm.magics["cell"] + + # Test that line alias is created successfully. + ip.run_line_magic("alias_magic", "--line env_alias env") + assert ip.run_line_magic("env", "") == ip.run_line_magic("env_alias", "") + + # Test that line alias with parameters passed in is created successfully. + ip.run_line_magic( + "alias_magic", "--line history_alias history --params " + shlex.quote("3") + ) + assert "history_alias" in mm.magics["line"] + + +def test_save(): + """Test %save.""" + ip = get_ipython() + ip.history_manager.reset() # Clear any existing history. + cmds = ["a=1", "def b():\n return a**2", "print(a, b())"] + for i, cmd in enumerate(cmds, start=1): + ip.history_manager.store_inputs(i, cmd) + with TemporaryDirectory() as tmpdir: + file = os.path.join(tmpdir, "testsave.py") + ip.run_line_magic("save", "%s 1-10" % file) + content = Path(file).read_text(encoding="utf-8") + assert content.count(cmds[0]) == 1 + assert "coding: utf-8" in content + ip.run_line_magic("save", "-a %s 1-10" % file) + content = Path(file).read_text(encoding="utf-8") + assert content.count(cmds[0]) == 2 + assert "coding: utf-8" in content + + +def test_save_with_no_args(): + ip = get_ipython() + ip.history_manager.reset() # Clear any existing history. + cmds = ["a=1", "def b():\n return a**2", "print(a, b())", "%save"] + for i, cmd in enumerate(cmds, start=1): + ip.history_manager.store_inputs(i, cmd) + + with TemporaryDirectory() as tmpdir: + path = os.path.join(tmpdir, "testsave.py") + ip.run_line_magic("save", path) + content = Path(path).read_text(encoding="utf-8") + expected_content = dedent( + """\ + # coding: utf-8 + a=1 + def b(): + return a**2 + print(a, b()) + """ + ) + assert content == expected_content + + +def test_store(): + """Test %store.""" + ip = get_ipython() + ip.run_line_magic('load_ext', 'storemagic') + + # make sure the storage is empty + ip.run_line_magic("store", "-z") + ip.user_ns["var"] = 42 + ip.run_line_magic("store", "var") + ip.user_ns["var"] = 39 + ip.run_line_magic("store", "-r") + assert ip.user_ns["var"] == 42 + + ip.run_line_magic("store", "-d var") + ip.user_ns["var"] = 39 + ip.run_line_magic("store", "-r") + assert ip.user_ns["var"] == 39 + + +def _run_edit_test(arg_s, exp_filename=None, + exp_lineno=-1, + exp_contents=None, + exp_is_temp=None): + ip = get_ipython() + M = code.CodeMagics(ip) + last_call = ['',''] + opts,args = M.parse_options(arg_s,'prxn:') + filename, lineno, is_temp = M._find_edit_target(ip, args, opts, last_call) + + if exp_filename is not None: + assert exp_filename == filename + if exp_contents is not None: + with io.open(filename, 'r', encoding='utf-8') as f: + contents = f.read() + assert exp_contents == contents + if exp_lineno != -1: + assert exp_lineno == lineno + if exp_is_temp is not None: + assert exp_is_temp == is_temp + + +def test_edit_interactive(): + """%edit on interactively defined objects""" + ip = get_ipython() + n = ip.execution_count + ip.run_cell("def foo(): return 1", store_history=True) + + with pytest.raises(code.InteractivelyDefined) as e: + _run_edit_test("foo") + assert e.value.index == n + + +def test_edit_cell(): + """%edit [cell id]""" + ip = get_ipython() + + ip.run_cell("def foo(): return 1", store_history=True) + + # test + _run_edit_test("1", exp_contents=ip.user_ns['In'][1], exp_is_temp=True) + +def test_edit_fname(): + """%edit file""" + # test + _run_edit_test("test 
file.py", exp_filename="test file.py") + +def test_bookmark(): + ip = get_ipython() + ip.run_line_magic('bookmark', 'bmname') + with tt.AssertPrints('bmname'): + ip.run_line_magic('bookmark', '-l') + ip.run_line_magic('bookmark', '-d bmname') + +def test_ls_magic(): + ip = get_ipython() + json_formatter = ip.display_formatter.formatters['application/json'] + json_formatter.enabled = True + lsmagic = ip.run_line_magic("lsmagic", "") + with warnings.catch_warnings(record=True) as w: + j = json_formatter(lsmagic) + assert sorted(j) == ["cell", "line"] + assert w == [] # no warnings + + +def test_strip_initial_indent(): + def sii(s): + lines = s.splitlines() + return '\n'.join(code.strip_initial_indent(lines)) + + assert sii(" a = 1\nb = 2") == "a = 1\nb = 2" + assert sii(" a\n b\nc") == "a\n b\nc" + assert sii("a\n b") == "a\n b" + +def test_logging_magic_quiet_from_arg(): + _ip.config.LoggingMagics.quiet = False + lm = logging.LoggingMagics(shell=_ip) + with TemporaryDirectory() as td: + try: + with tt.AssertNotPrints(re.compile("Activating.*")): + lm.logstart('-q {}'.format( + os.path.join(td, "quiet_from_arg.log"))) + finally: + _ip.logger.logstop() + +def test_logging_magic_quiet_from_config(): + _ip.config.LoggingMagics.quiet = True + lm = logging.LoggingMagics(shell=_ip) + with TemporaryDirectory() as td: + try: + with tt.AssertNotPrints(re.compile("Activating.*")): + lm.logstart(os.path.join(td, "quiet_from_config.log")) + finally: + _ip.logger.logstop() + + +def test_logging_magic_not_quiet(): + _ip.config.LoggingMagics.quiet = False + lm = logging.LoggingMagics(shell=_ip) + with TemporaryDirectory() as td: + try: + with tt.AssertPrints(re.compile("Activating.*")): + lm.logstart(os.path.join(td, "not_quiet.log")) + finally: + _ip.logger.logstop() + + +def test_time_no_var_expand(): + _ip.user_ns["a"] = 5 + _ip.user_ns["b"] = [] + _ip.run_line_magic("time", 'b.append("{a}")') + assert _ip.user_ns["b"] == ["{a}"] + + +# this is slow, put at the end for local testing. 
+def test_timeit_arguments(): + "Test valid timeit arguments, should not cause SyntaxError (GH #1269)" + _ip.run_line_magic("timeit", "-n1 -r1 a=('#')") + + +MINIMAL_LAZY_MAGIC = """ +from IPython.core.magic import ( + Magics, + magics_class, + line_magic, + cell_magic, +) + + +@magics_class +class LazyMagics(Magics): + @line_magic + def lazy_line(self, line): + print("Lazy Line") + + @cell_magic + def lazy_cell(self, line, cell): + print("Lazy Cell") + + +def load_ipython_extension(ipython): + ipython.register_magics(LazyMagics) +""" + + +def test_lazy_magics(): + with pytest.raises(UsageError): + ip.run_line_magic("lazy_line", "") + + startdir = os.getcwd() + + with TemporaryDirectory() as tmpdir: + with prepended_to_syspath(tmpdir): + ptempdir = Path(tmpdir) + tf = ptempdir / "lazy_magic_module.py" + tf.write_text(MINIMAL_LAZY_MAGIC) + ip.magics_manager.register_lazy("lazy_line", Path(tf.name).name[:-3]) + with tt.AssertPrints("Lazy Line"): + ip.run_line_magic("lazy_line", "") + + +TEST_MODULE = """ +print('Loaded my_tmp') +if __name__ == "__main__": + print('I just ran a script') +""" + +def test_run_module_from_import_hook(): + "Test that a module can be loaded via an import hook" + with TemporaryDirectory() as tmpdir: + fullpath = os.path.join(tmpdir, "my_tmp.py") + Path(fullpath).write_text(TEST_MODULE, encoding="utf-8") + + import importlib.abc + import importlib.util + + class MyTempImporter(importlib.abc.MetaPathFinder, importlib.abc.SourceLoader): + def find_spec(self, fullname, path, target=None): + if fullname == "my_tmp": + return importlib.util.spec_from_loader(fullname, self) + + def get_filename(self, fullname): + assert fullname == "my_tmp" + return fullpath + + def get_data(self, path): + assert Path(path).samefile(fullpath) + return Path(fullpath).read_text(encoding="utf-8") + + sys.meta_path.insert(0, MyTempImporter()) + + with capture_output() as captured: + _ip.run_line_magic("run", "-m my_tmp") + _ip.run_cell("import my_tmp") + + output = "Loaded my_tmp\nI just ran a script\nLoaded my_tmp\n" + assert output == captured.stdout + + sys.meta_path.pop(0) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_arguments.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_arguments.py new file mode 100644 index 000000000..8b263b25d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_arguments.py @@ -0,0 +1,140 @@ +#----------------------------------------------------------------------------- +# Copyright (C) 2010-2011, IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +import argparse +import sys + +from IPython.core.magic_arguments import (argument, argument_group, kwds, + magic_arguments, parse_argstring, real_name) + + +@magic_arguments() +@argument('-f', '--foo', help="an argument") +def magic_foo1(self, args): + """ A docstring. + """ + return parse_argstring(magic_foo1, args) + + +@magic_arguments() +def magic_foo2(self, args): + """ A docstring. + """ + return parse_argstring(magic_foo2, args) + + +@magic_arguments() +@argument('-f', '--foo', help="an argument") +@argument_group('Group') +@argument('-b', '--bar', help="a grouped argument") +@argument_group('Second Group') +@argument('-z', '--baz', help="another grouped argument") +def magic_foo3(self, args): + """ A docstring. 
+ """ + return parse_argstring(magic_foo3, args) + + +@magic_arguments() +@kwds(argument_default=argparse.SUPPRESS) +@argument('-f', '--foo', help="an argument") +def magic_foo4(self, args): + """ A docstring. + """ + return parse_argstring(magic_foo4, args) + + +@magic_arguments('frobnicate') +@argument('-f', '--foo', help="an argument") +def magic_foo5(self, args): + """ A docstring. + """ + return parse_argstring(magic_foo5, args) + + +@magic_arguments() +@argument('-f', '--foo', help="an argument") +def magic_magic_foo(self, args): + """ A docstring. + """ + return parse_argstring(magic_magic_foo, args) + + +@magic_arguments() +@argument('-f', '--foo', help="an argument") +def foo(self, args): + """ A docstring. + """ + return parse_argstring(foo, args) + + +def test_magic_arguments(): + # “optional arguments†was replaced with “options†in argparse help + # https://docs.python.org/3/whatsnew/3.10.html#argparse + # https://bugs.python.org/issue9694 + options = "optional arguments" if sys.version_info < (3, 10) else "options" + + assert ( + magic_foo1.__doc__ + == f"::\n\n %foo1 [-f FOO]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n" + ) + assert getattr(magic_foo1, "argcmd_name", None) == None + assert real_name(magic_foo1) == "foo1" + assert magic_foo1(None, "") == argparse.Namespace(foo=None) + assert hasattr(magic_foo1, "has_arguments") + + assert magic_foo2.__doc__ == "::\n\n %foo2\n\n A docstring.\n" + assert getattr(magic_foo2, "argcmd_name", None) == None + assert real_name(magic_foo2) == "foo2" + assert magic_foo2(None, "") == argparse.Namespace() + assert hasattr(magic_foo2, "has_arguments") + + assert ( + magic_foo3.__doc__ + == f"::\n\n %foo3 [-f FOO] [-b BAR] [-z BAZ]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n\nGroup:\n -b BAR, --bar BAR a grouped argument\n\nSecond Group:\n -z BAZ, --baz BAZ another grouped argument\n" + ) + assert getattr(magic_foo3, "argcmd_name", None) == None + assert real_name(magic_foo3) == "foo3" + assert magic_foo3(None, "") == argparse.Namespace(bar=None, baz=None, foo=None) + assert hasattr(magic_foo3, "has_arguments") + + assert ( + magic_foo4.__doc__ + == f"::\n\n %foo4 [-f FOO]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n" + ) + assert getattr(magic_foo4, "argcmd_name", None) == None + assert real_name(magic_foo4) == "foo4" + assert magic_foo4(None, "") == argparse.Namespace() + assert hasattr(magic_foo4, "has_arguments") + + assert ( + magic_foo5.__doc__ + == f"::\n\n %frobnicate [-f FOO]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n" + ) + assert getattr(magic_foo5, "argcmd_name", None) == "frobnicate" + assert real_name(magic_foo5) == "frobnicate" + assert magic_foo5(None, "") == argparse.Namespace(foo=None) + assert hasattr(magic_foo5, "has_arguments") + + assert ( + magic_magic_foo.__doc__ + == f"::\n\n %magic_foo [-f FOO]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n" + ) + assert getattr(magic_magic_foo, "argcmd_name", None) == None + assert real_name(magic_magic_foo) == "magic_foo" + assert magic_magic_foo(None, "") == argparse.Namespace(foo=None) + assert hasattr(magic_magic_foo, "has_arguments") + + assert ( + foo.__doc__ + == f"::\n\n %foo [-f FOO]\n\n A docstring.\n\n{options}:\n -f FOO, --foo FOO an argument\n" + ) + assert getattr(foo, "argcmd_name", None) == None + assert real_name(foo) == "foo" + assert foo(None, "") == argparse.Namespace(foo=None) + assert hasattr(foo, "has_arguments") diff --git 
a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_terminal.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_terminal.py new file mode 100644 index 000000000..5dfa0f0ed --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_magic_terminal.py @@ -0,0 +1,216 @@ +"""Tests for various magic functions specific to the terminal frontend.""" + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +from io import StringIO +from unittest import TestCase + +from IPython.testing import tools as tt +#----------------------------------------------------------------------------- +# Test functions begin +#----------------------------------------------------------------------------- + + +MINIMAL_LAZY_MAGIC = """ +from IPython.core.magic import ( + Magics, + magics_class, + line_magic, + cell_magic, +) + + +@magics_class +class LazyMagics(Magics): + @line_magic + def lazy_line(self, line): + print("Lazy Line") + + @cell_magic + def lazy_cell(self, line, cell): + print("Lazy Cell") + + +def load_ipython_extension(ipython): + ipython.register_magics(LazyMagics) +""" + +def check_cpaste(code, should_fail=False): + """Execute code via 'cpaste' and ensure it was executed, unless + should_fail is set. + """ + ip.user_ns['code_ran'] = False + + src = StringIO() + src.write(code) + src.write('\n--\n') + src.seek(0) + + stdin_save = sys.stdin + sys.stdin = src + + try: + context = tt.AssertPrints if should_fail else tt.AssertNotPrints + with context("Traceback (most recent call last)"): + ip.run_line_magic("cpaste", "") + + if not should_fail: + assert ip.user_ns['code_ran'], "%r failed" % code + finally: + sys.stdin = stdin_save + +def test_cpaste(): + """Test cpaste magic""" + + def runf(): + """Marker function: sets a flag when executed. + """ + ip.user_ns['code_ran'] = True + return 'runf' # return string so '+ runf()' doesn't result in success + + tests = {'pass': ["runf()", + "In [1]: runf()", + "In [1]: if 1:\n ...: runf()", + "> > > runf()", + ">>> runf()", + " >>> runf()", + ], + + 'fail': ["1 + runf()", + "++ runf()", + ]} + + ip.user_ns['runf'] = runf + + for code in tests['pass']: + check_cpaste(code) + + for code in tests['fail']: + check_cpaste(code, should_fail=True) + + + +class PasteTestCase(TestCase): + """Multiple tests for clipboard pasting""" + + def paste(self, txt, flags='-q'): + """Paste input text, by default in quiet mode""" + ip.hooks.clipboard_get = lambda: txt + ip.run_line_magic("paste", flags) + + def setUp(self): + # Inject fake clipboard hook but save original so we can restore it later + self.original_clip = ip.hooks.clipboard_get + + def tearDown(self): + # Restore original hook + ip.hooks.clipboard_get = self.original_clip + + def test_paste(self): + ip.user_ns.pop("x", None) + self.paste("x = 1") + self.assertEqual(ip.user_ns["x"], 1) + ip.user_ns.pop("x") + + def test_paste_pyprompt(self): + ip.user_ns.pop("x", None) + self.paste(">>> x=2") + self.assertEqual(ip.user_ns["x"], 2) + ip.user_ns.pop("x") + + def test_paste_py_multi(self): + self.paste( + """ + >>> x = [1,2,3] + >>> y = [] + >>> for i in x: + ... y.append(i**2) + ... 
+ """ + ) + self.assertEqual(ip.user_ns["x"], [1, 2, 3]) + self.assertEqual(ip.user_ns["y"], [1, 4, 9]) + + def test_paste_py_multi_r(self): + "Now, test that self.paste -r works" + self.test_paste_py_multi() + self.assertEqual(ip.user_ns.pop("x"), [1, 2, 3]) + self.assertEqual(ip.user_ns.pop("y"), [1, 4, 9]) + self.assertFalse("x" in ip.user_ns) + ip.run_line_magic("paste", "-r") + self.assertEqual(ip.user_ns["x"], [1, 2, 3]) + self.assertEqual(ip.user_ns["y"], [1, 4, 9]) + + def test_paste_email(self): + "Test pasting of email-quoted contents" + self.paste( + """\ + >> def foo(x): + >> return x + 1 + >> xx = foo(1.1)""" + ) + self.assertEqual(ip.user_ns["xx"], 2.1) + + def test_paste_email2(self): + "Email again; some programs add a space also at each quoting level" + self.paste( + """\ + > > def foo(x): + > > return x + 1 + > > yy = foo(2.1) """ + ) + self.assertEqual(ip.user_ns["yy"], 3.1) + + def test_paste_email_py(self): + "Email quoting of interactive input" + self.paste( + """\ + >> >>> def f(x): + >> ... return x+1 + >> ... + >> >>> zz = f(2.5) """ + ) + self.assertEqual(ip.user_ns["zz"], 3.5) + + def test_paste_echo(self): + "Also test self.paste echoing, by temporarily faking the writer" + w = StringIO() + old_write = sys.stdout.write + sys.stdout.write = w.write + code = """ + a = 100 + b = 200""" + try: + self.paste(code,'') + out = w.getvalue() + finally: + sys.stdout.write = old_write + self.assertEqual(ip.user_ns["a"], 100) + self.assertEqual(ip.user_ns["b"], 200) + assert out == code + "\n## -- End pasted text --\n" + + def test_paste_leading_commas(self): + "Test multiline strings with leading commas" + tm = ip.magics_manager.registry['TerminalMagics'] + s = '''\ +a = """ +,1,2,3 +"""''' + ip.user_ns.pop("foo", None) + tm.store_or_execute(s, "foo") + self.assertIn("foo", ip.user_ns) + + def test_paste_trailing_question(self): + "Test pasting sources with trailing question marks" + tm = ip.magics_manager.registry['TerminalMagics'] + s = '''\ +def funcfoo(): + if True: #am i true? + return 'fooresult' +''' + ip.user_ns.pop('funcfoo', None) + self.paste(s) + self.assertEqual(ip.user_ns["funcfoo"](), "fooresult") diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_oinspect.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_oinspect.py new file mode 100644 index 000000000..8ae146fa4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_oinspect.py @@ -0,0 +1,541 @@ +"""Tests for the object inspection functionality. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from contextlib import contextmanager +from inspect import signature, Signature, Parameter +import inspect +import os +import pytest +import re +import sys + +from .. 
import oinspect + +from decorator import decorator + +from IPython.testing.tools import AssertPrints, AssertNotPrints +from IPython.utils.path import compress_user + + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- + +inspector = None + +def setup_module(): + global inspector + inspector = oinspect.Inspector() + + +class SourceModuleMainTest: + __module__ = "__main__" + + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- + +# WARNING: since this test checks the line number where a function is +# defined, if any code is inserted above, the following line will need to be +# updated. Do NOT insert any whitespace between the next line and the function +# definition below. +THIS_LINE_NUMBER = 47 # Put here the actual number of this line + + +def test_find_source_lines(): + assert oinspect.find_source_lines(test_find_source_lines) == THIS_LINE_NUMBER + 3 + assert oinspect.find_source_lines(type) is None + assert oinspect.find_source_lines(SourceModuleMainTest) is None + assert oinspect.find_source_lines(SourceModuleMainTest()) is None + + +def test_getsource(): + assert oinspect.getsource(type) is None + assert oinspect.getsource(SourceModuleMainTest) is None + assert oinspect.getsource(SourceModuleMainTest()) is None + + +def test_inspect_getfile_raises_exception(): + """Check oinspect.find_file/getsource/find_source_lines expectations""" + with pytest.raises(TypeError): + inspect.getfile(type) + with pytest.raises(OSError if sys.version_info >= (3, 10) else TypeError): + inspect.getfile(SourceModuleMainTest) + + +# A couple of utilities to ensure these tests work the same from a source or a +# binary install +def pyfile(fname): + return os.path.normcase(re.sub('.py[co]$', '.py', fname)) + + +def match_pyfiles(f1, f2): + assert pyfile(f1) == pyfile(f2) + + +def test_find_file(): + match_pyfiles(oinspect.find_file(test_find_file), os.path.abspath(__file__)) + assert oinspect.find_file(type) is None + assert oinspect.find_file(SourceModuleMainTest) is None + assert oinspect.find_file(SourceModuleMainTest()) is None + + +def test_find_file_decorated1(): + + @decorator + def noop1(f): + def wrapper(*a, **kw): + return f(*a, **kw) + return wrapper + + @noop1 + def f(x): + "My docstring" + + match_pyfiles(oinspect.find_file(f), os.path.abspath(__file__)) + assert f.__doc__ == "My docstring" + + +def test_find_file_decorated2(): + + @decorator + def noop2(f, *a, **kw): + return f(*a, **kw) + + @noop2 + @noop2 + @noop2 + def f(x): + "My docstring 2" + + match_pyfiles(oinspect.find_file(f), os.path.abspath(__file__)) + assert f.__doc__ == "My docstring 2" + + +def test_find_file_magic(): + run = ip.find_line_magic('run') + assert oinspect.find_file(run) is not None + + +# A few generic objects we can then inspect in the tests below + +class Call(object): + """This is the class docstring.""" + + def __init__(self, x, y=1): + """This is the constructor docstring.""" + + def __call__(self, *a, **kw): + """This is the call docstring.""" + + def method(self, x, z=2): + """Some method's docstring""" + +class HasSignature(object): + """This is the class docstring.""" + __signature__ = Signature([Parameter('test', Parameter.POSITIONAL_OR_KEYWORD)]) + + def __init__(self, *args): + """This is the init docstring""" + + +class SimpleClass(object): + 
def method(self, x, z=2):
+        """Some method's docstring"""
+
+
+class Awkward(object):
+    def __getattr__(self, name):
+        raise Exception(name)
+
+class NoBoolCall:
+    """
+    callable with `__bool__` raising should still be inspect-able.
+    """
+
+    def __call__(self):
+        """does nothing"""
+        pass
+
+    def __bool__(self):
+        """just raise NotImplemented"""
+        raise NotImplementedError('Must be implemented')
+
+
+class SerialLiar(object):
+    """Attribute accesses always get another copy of the same class.
+
+    unittest.mock.call does something similar, but it's not ideal for testing
+    as the failure mode is to eat all your RAM. This gives up after 10k levels.
+    """
+    def __init__(self, max_fibbing_twig, lies_told=0):
+        if lies_told > 10000:
+            raise RuntimeError('Nose too long, honesty is the best policy')
+        self.max_fibbing_twig = max_fibbing_twig
+        self.lies_told = lies_told
+        max_fibbing_twig[0] = max(max_fibbing_twig[0], lies_told)
+
+    def __getattr__(self, item):
+        return SerialLiar(self.max_fibbing_twig, self.lies_told + 1)
+
+#-----------------------------------------------------------------------------
+# Tests
+#-----------------------------------------------------------------------------
+
+def test_info():
+    "Check that Inspector.info fills out various fields as expected."
+    i = inspector.info(Call, oname="Call")
+    assert i["type_name"] == "type"
+    expected_class = str(type(type)) # <class 'type'> (Python 3) or <type 'type'>
+    assert i["base_class"] == expected_class
+    assert re.search(
+        "<class 'IPython.core.tests.test_oinspect.Call'>",
+        i["string_form"],
+    )
+    fname = __file__
+    if fname.endswith(".pyc"):
+        fname = fname[:-1]
+    # case-insensitive comparison needed on some filesystems
+    # e.g. Windows:
+    assert i["file"].lower() == compress_user(fname).lower()
+    assert i["definition"] == None
+    assert i["docstring"] == Call.__doc__
+    assert i["source"] == None
+    assert i["isclass"] is True
+    assert i["init_definition"] == "Call(x, y=1)"
+    assert i["init_docstring"] == Call.__init__.__doc__
+
+    i = inspector.info(Call, detail_level=1)
+    assert i["source"] is not None
+    assert i["docstring"] == None
+
+    c = Call(1)
+    c.__doc__ = "Modified instance docstring"
+    i = inspector.info(c)
+    assert i["type_name"] == "Call"
+    assert i["docstring"] == "Modified instance docstring"
+    assert i["class_docstring"] == Call.__doc__
+    assert i["init_docstring"] == Call.__init__.__doc__
+    assert i["call_docstring"] == Call.__call__.__doc__
+
+
+def test_class_signature():
+    info = inspector.info(HasSignature, "HasSignature")
+    assert info["init_definition"] == "HasSignature(test)"
+    assert info["init_docstring"] == HasSignature.__init__.__doc__
+
+
+def test_info_awkward():
+    # Just test that this doesn't throw an error.
+    inspector.info(Awkward())
+
+def test_bool_raise():
+    inspector.info(NoBoolCall())
+
+def test_info_serialliar():
+    fib_tracker = [0]
+    inspector.info(SerialLiar(fib_tracker))
+
+    # Nested attribute access should be cut off at 100 levels deep to avoid
+    # infinite loops: https://github.com/ipython/ipython/issues/9122
+    assert fib_tracker[0] < 9000
+
+def support_function_one(x, y=2, *a, **kw):
+    """A simple function."""
+
+def test_calldef_none():
+    # We should ignore __call__ for all of these.
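+    # Plain functions, bound methods, and builtins should report no separate call signature.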
+ for obj in [support_function_one, SimpleClass().method, any, str.upper]: + i = inspector.info(obj) + assert i["call_def"] is None + + +def f_kwarg(pos, *, kwonly): + pass + +def test_definition_kwonlyargs(): + i = inspector.info(f_kwarg, oname="f_kwarg") # analysis:ignore + assert i["definition"] == "f_kwarg(pos, *, kwonly)" + + +def test_getdoc(): + class A(object): + """standard docstring""" + pass + + class B(object): + """standard docstring""" + def getdoc(self): + return "custom docstring" + + class C(object): + """standard docstring""" + def getdoc(self): + return None + + a = A() + b = B() + c = C() + + assert oinspect.getdoc(a) == "standard docstring" + assert oinspect.getdoc(b) == "custom docstring" + assert oinspect.getdoc(c) == "standard docstring" + + +def test_empty_property_has_no_source(): + i = inspector.info(property(), detail_level=1) + assert i["source"] is None + + +def test_property_sources(): + # A simple adder whose source and signature stays + # the same across Python distributions + def simple_add(a, b): + "Adds two numbers" + return a + b + + class A(object): + @property + def foo(self): + return 'bar' + + foo = foo.setter(lambda self, v: setattr(self, 'bar', v)) + + dname = property(oinspect.getdoc) + adder = property(simple_add) + + i = inspector.info(A.foo, detail_level=1) + assert "def foo(self):" in i["source"] + assert "lambda self, v:" in i["source"] + + i = inspector.info(A.dname, detail_level=1) + assert "def getdoc(obj)" in i["source"] + + i = inspector.info(A.adder, detail_level=1) + assert "def simple_add(a, b)" in i["source"] + + +def test_property_docstring_is_in_info_for_detail_level_0(): + class A(object): + @property + def foobar(self): + """This is `foobar` property.""" + pass + + ip.user_ns["a_obj"] = A() + assert ( + "This is `foobar` property." + == ip.object_inspect("a_obj.foobar", detail_level=0)["docstring"] + ) + + ip.user_ns["a_cls"] = A + assert ( + "This is `foobar` property." + == ip.object_inspect("a_cls.foobar", detail_level=0)["docstring"] + ) + + +def test_pdef(): + # See gh-1914 + def foo(): pass + inspector.pdef(foo, 'foo') + + +@contextmanager +def cleanup_user_ns(**kwargs): + """ + On exit delete all the keys that were not in user_ns before entering. + + It does not restore old values ! + + Parameters + ---------- + + **kwargs + used to update ip.user_ns + + """ + try: + known = set(ip.user_ns.keys()) + ip.user_ns.update(kwargs) + yield + finally: + added = set(ip.user_ns.keys()) - known + for k in added: + del ip.user_ns[k] + + +def test_pinfo_getindex(): + def dummy(): + """ + MARKER + """ + + container = [dummy] + with cleanup_user_ns(container=container): + with AssertPrints("MARKER"): + ip._inspect("pinfo", "container[0]", detail_level=0) + assert "container" not in ip.user_ns.keys() + + +def test_qmark_getindex(): + def dummy(): + """ + MARKER 2 + """ + + container = [dummy] + with cleanup_user_ns(container=container): + with AssertPrints("MARKER 2"): + ip.run_cell("container[0]?") + assert "container" not in ip.user_ns.keys() + + +def test_qmark_getindex_negatif(): + def dummy(): + """ + MARKER 3 + """ + + container = [dummy] + with cleanup_user_ns(container=container): + with AssertPrints("MARKER 3"): + ip.run_cell("container[-1]?") + assert "container" not in ip.user_ns.keys() + + + +def test_pinfo_nonascii(): + # See gh-1177 + from . 
import nonascii2 + ip.user_ns['nonascii2'] = nonascii2 + ip._inspect('pinfo', 'nonascii2', detail_level=1) + +def test_pinfo_type(): + """ + type can fail in various edge case, for example `type.__subclass__()` + """ + ip._inspect('pinfo', 'type') + + +def test_pinfo_docstring_no_source(): + """Docstring should be included with detail_level=1 if there is no source""" + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'str.format', detail_level=0) + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'str.format', detail_level=1) + + +def test_pinfo_no_docstring_if_source(): + """Docstring should not be included with detail_level=1 if source is found""" + def foo(): + """foo has a docstring""" + + ip.user_ns['foo'] = foo + + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'foo', detail_level=0) + with AssertPrints('Source:'): + ip._inspect('pinfo', 'foo', detail_level=1) + with AssertNotPrints('Docstring:'): + ip._inspect('pinfo', 'foo', detail_level=1) + + +def test_pinfo_docstring_if_detail_and_no_source(): + """ Docstring should be displayed if source info not available """ + obj_def = '''class Foo(object): + """ This is a docstring for Foo """ + def bar(self): + """ This is a docstring for Foo.bar """ + pass + ''' + + ip.run_cell(obj_def) + ip.run_cell('foo = Foo()') + + with AssertNotPrints("Source:"): + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'foo', detail_level=0) + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'foo', detail_level=1) + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'foo.bar', detail_level=0) + + with AssertNotPrints('Docstring:'): + with AssertPrints('Source:'): + ip._inspect('pinfo', 'foo.bar', detail_level=1) + + +def test_pinfo_magic(): + with AssertPrints('Docstring:'): + ip._inspect('pinfo', 'lsmagic', detail_level=0) + + with AssertPrints('Source:'): + ip._inspect('pinfo', 'lsmagic', detail_level=1) + + +def test_init_colors(): + # ensure colors are not present in signature info + info = inspector.info(HasSignature) + init_def = info["init_definition"] + assert "[0m" not in init_def + + +def test_builtin_init(): + info = inspector.info(list) + init_def = info['init_definition'] + assert init_def is not None + + +def test_render_signature_short(): + def short_fun(a=1): pass + sig = oinspect._render_signature( + signature(short_fun), + short_fun.__name__, + ) + assert sig == "short_fun(a=1)" + + +def test_render_signature_long(): + from typing import Optional + + def long_function( + a_really_long_parameter: int, + and_another_long_one: bool = False, + let_us_make_sure_this_is_looong: Optional[str] = None, + ) -> bool: pass + + sig = oinspect._render_signature( + signature(long_function), + long_function.__name__, + ) + assert sig in [ + # Python >=3.9 + '''\ +long_function( + a_really_long_parameter: int, + and_another_long_one: bool = False, + let_us_make_sure_this_is_looong: Optional[str] = None, +) -> bool\ +''', + # Python >=3.7 + '''\ +long_function( + a_really_long_parameter: int, + and_another_long_one: bool = False, + let_us_make_sure_this_is_looong: Union[str, NoneType] = None, +) -> bool\ +''', # Python <=3.6 + '''\ +long_function( + a_really_long_parameter:int, + and_another_long_one:bool=False, + let_us_make_sure_this_is_looong:Union[str, NoneType]=None, +) -> bool\ +''', + ] diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_page.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_page.py new file mode 100644 index 000000000..9f6a37424 --- /dev/null +++ 
b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_page.py
@@ -0,0 +1,20 @@
+#-----------------------------------------------------------------------------
+# Copyright (C) 2010-2011 The IPython Development Team.
+#
+# Distributed under the terms of the BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+import io
+
+# N.B. For the test suite, page.page is overridden (see IPython.testing.globalipapp)
+from IPython.core import page
+
+def test_detect_screen_size():
+    """Simple smoketest for page._detect_screen_size."""
+    try:
+        page._detect_screen_size(True, 25)
+    except (TypeError, io.UnsupportedOperation):
+        # This can happen in the test suite, because stdout may not have a
+        # fileno.
+        pass
diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_paths.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_paths.py
new file mode 100644
index 000000000..86367b61e
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_paths.py
@@ -0,0 +1,200 @@
+import errno
+import os
+import shutil
+import tempfile
+import warnings
+from unittest.mock import patch
+
+from tempfile import TemporaryDirectory
+from testpath import assert_isdir, assert_isfile, modified_env
+
+from IPython import paths
+from IPython.testing.decorators import skip_win32
+
+TMP_TEST_DIR = os.path.realpath(tempfile.mkdtemp())
+HOME_TEST_DIR = os.path.join(TMP_TEST_DIR, "home_test_dir")
+XDG_TEST_DIR = os.path.join(HOME_TEST_DIR, "xdg_test_dir")
+XDG_CACHE_DIR = os.path.join(HOME_TEST_DIR, "xdg_cache_dir")
+IP_TEST_DIR = os.path.join(HOME_TEST_DIR,'.ipython')
+
+def setup_module():
+    """Set up the test environment for the module:
+
+    - Adds dummy home dir tree
+    """
+    # Do not mask exceptions here. In particular, catching WindowsError is a
+    # problem because that exception is only defined on Windows...
+    os.makedirs(IP_TEST_DIR)
+    os.makedirs(os.path.join(XDG_TEST_DIR, 'ipython'))
+    os.makedirs(os.path.join(XDG_CACHE_DIR, 'ipython'))
+
+
+def teardown_module():
+    """Tear down the test environment for the module:
+
+    - Remove dummy home dir tree
+    """
+    # Note: we remove the parent test dir, which is the root of all test
+    # subdirs we may have created. Use shutil instead of os.removedirs, so
+    # that non-empty directories are all recursively removed.
+ shutil.rmtree(TMP_TEST_DIR) + +def patch_get_home_dir(dirpath): + return patch.object(paths, 'get_home_dir', return_value=dirpath) + + +def test_get_ipython_dir_1(): + """test_get_ipython_dir_1, Testcase to see if we can call get_ipython_dir without Exceptions.""" + env_ipdir = os.path.join("someplace", ".ipython") + with patch.object(paths, '_writable_dir', return_value=True), \ + modified_env({'IPYTHONDIR': env_ipdir}): + ipdir = paths.get_ipython_dir() + + assert ipdir == env_ipdir + +def test_get_ipython_dir_2(): + """test_get_ipython_dir_2, Testcase to see if we can call get_ipython_dir without Exceptions.""" + with patch_get_home_dir('someplace'), \ + patch.object(paths, 'get_xdg_dir', return_value=None), \ + patch.object(paths, '_writable_dir', return_value=True), \ + patch('os.name', "posix"), \ + modified_env({'IPYTHON_DIR': None, + 'IPYTHONDIR': None, + 'XDG_CONFIG_HOME': None + }): + ipdir = paths.get_ipython_dir() + + assert ipdir == os.path.join("someplace", ".ipython") + +def test_get_ipython_dir_3(): + """test_get_ipython_dir_3, use XDG if defined and exists, and .ipython doesn't exist.""" + tmphome = TemporaryDirectory() + try: + with patch_get_home_dir(tmphome.name), \ + patch('os.name', 'posix'), \ + modified_env({ + 'IPYTHON_DIR': None, + 'IPYTHONDIR': None, + 'XDG_CONFIG_HOME': XDG_TEST_DIR, + }), warnings.catch_warnings(record=True) as w: + ipdir = paths.get_ipython_dir() + + assert ipdir == os.path.join(tmphome.name, XDG_TEST_DIR, "ipython") + assert len(w) == 0 + finally: + tmphome.cleanup() + +def test_get_ipython_dir_4(): + """test_get_ipython_dir_4, warn if XDG and home both exist.""" + with patch_get_home_dir(HOME_TEST_DIR), \ + patch('os.name', 'posix'): + try: + os.mkdir(os.path.join(XDG_TEST_DIR, 'ipython')) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + + with modified_env({ + 'IPYTHON_DIR': None, + 'IPYTHONDIR': None, + 'XDG_CONFIG_HOME': XDG_TEST_DIR, + }), warnings.catch_warnings(record=True) as w: + ipdir = paths.get_ipython_dir() + + assert len(w) == 1 + assert "Ignoring" in str(w[0]) + + +def test_get_ipython_dir_5(): + """test_get_ipython_dir_5, use .ipython if exists and XDG defined, but doesn't exist.""" + with patch_get_home_dir(HOME_TEST_DIR), \ + patch('os.name', 'posix'): + try: + os.rmdir(os.path.join(XDG_TEST_DIR, 'ipython')) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + with modified_env({ + 'IPYTHON_DIR': None, + 'IPYTHONDIR': None, + 'XDG_CONFIG_HOME': XDG_TEST_DIR, + }): + ipdir = paths.get_ipython_dir() + + assert ipdir == IP_TEST_DIR + +def test_get_ipython_dir_6(): + """test_get_ipython_dir_6, use home over XDG if defined and neither exist.""" + xdg = os.path.join(HOME_TEST_DIR, 'somexdg') + os.mkdir(xdg) + shutil.rmtree(os.path.join(HOME_TEST_DIR, '.ipython')) + print(paths._writable_dir) + with patch_get_home_dir(HOME_TEST_DIR), \ + patch.object(paths, 'get_xdg_dir', return_value=xdg), \ + patch('os.name', 'posix'), \ + modified_env({ + 'IPYTHON_DIR': None, + 'IPYTHONDIR': None, + 'XDG_CONFIG_HOME': None, + }), warnings.catch_warnings(record=True) as w: + ipdir = paths.get_ipython_dir() + + assert ipdir == os.path.join(HOME_TEST_DIR, ".ipython") + assert len(w) == 0 + +def test_get_ipython_dir_7(): + """test_get_ipython_dir_7, test home directory expansion on IPYTHONDIR""" + home_dir = os.path.normpath(os.path.expanduser('~')) + with modified_env({'IPYTHONDIR': os.path.join('~', 'somewhere')}), \ + patch.object(paths, '_writable_dir', return_value=True): + ipdir = paths.get_ipython_dir() + assert 
ipdir == os.path.join(home_dir, "somewhere") + + +@skip_win32 +def test_get_ipython_dir_8(): + """test_get_ipython_dir_8, test / home directory""" + if not os.access("/", os.W_OK): + # test only when HOME directory actually writable + return + + with patch.object(paths, "_writable_dir", lambda path: bool(path)), patch.object( + paths, "get_xdg_dir", return_value=None + ), modified_env( + { + "IPYTHON_DIR": None, + "IPYTHONDIR": None, + "HOME": "/", + } + ): + assert paths.get_ipython_dir() == "/.ipython" + + +def test_get_ipython_cache_dir(): + with modified_env({'HOME': HOME_TEST_DIR}): + if os.name == "posix": + # test default + os.makedirs(os.path.join(HOME_TEST_DIR, ".cache")) + with modified_env({'XDG_CACHE_HOME': None}): + ipdir = paths.get_ipython_cache_dir() + assert os.path.join(HOME_TEST_DIR, ".cache", "ipython") == ipdir + assert_isdir(ipdir) + + # test env override + with modified_env({"XDG_CACHE_HOME": XDG_CACHE_DIR}): + ipdir = paths.get_ipython_cache_dir() + assert_isdir(ipdir) + assert ipdir == os.path.join(XDG_CACHE_DIR, "ipython") + else: + assert paths.get_ipython_cache_dir() == paths.get_ipython_dir() + +def test_get_ipython_package_dir(): + ipdir = paths.get_ipython_package_dir() + assert_isdir(ipdir) + + +def test_get_ipython_module_path(): + ipapp_path = paths.get_ipython_module_path('IPython.terminal.ipapp') + assert_isfile(ipapp_path) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prefilter.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prefilter.py new file mode 100644 index 000000000..91c3c8688 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prefilter.py @@ -0,0 +1,127 @@ +"""Tests for input manipulation machinery.""" + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import pytest + +from IPython.core.prefilter import AutocallChecker + +#----------------------------------------------------------------------------- +# Tests +#----------------------------------------------------------------------------- + +def test_prefilter(): + """Test user input conversions""" + + # pairs of (raw, expected correct) input + pairs = [ ('2+2','2+2'), + ] + + for raw, correct in pairs: + assert ip.prefilter(raw) == correct + +def test_prefilter_shadowed(): + def dummy_magic(line): pass + + prev_automagic_state = ip.automagic + ip.automagic = True + ip.autocall = 0 + + try: + # These should not be transformed - they are shadowed by other names + for name in ['if', 'zip', 'get_ipython']: # keyword, builtin, global + ip.register_magic_function(dummy_magic, magic_name=name) + res = ip.prefilter(name + " foo") + assert res == name + " foo" + del ip.magics_manager.magics["line"][name] + + # These should be transformed + for name in ['fi', 'piz', 'nohtypi_teg']: + ip.register_magic_function(dummy_magic, magic_name=name) + res = ip.prefilter(name + " foo") + assert res != name + " foo" + del ip.magics_manager.magics["line"][name] + + finally: + ip.automagic = prev_automagic_state + +def test_autocall_binops(): + """See https://github.com/ipython/ipython/issues/81""" + ip.magic('autocall 2') + f = lambda x: x + ip.user_ns['f'] = f + try: + assert ip.prefilter("f 1") == "f(1)" + for t in ["f +1", "f -1"]: + assert ip.prefilter(t) == t + + # Run tests again with a more permissive exclude_regexp, which will + # allow transformation of binary operations ('f -1' -> 'f(-1)'). 
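+        # With the default exclude_regexp, a leading '+' or '-' is treated
+        # as a binary operator and the line is left alone (as asserted
+        # above); the pattern set below omits those characters, so the
+        # checker can rewrite `f -1` into `f(-1)`.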
+ pm = ip.prefilter_manager + ac = AutocallChecker(shell=pm.shell, prefilter_manager=pm, + config=pm.config) + try: + ac.priority = 1 + ac.exclude_regexp = r'^[,&^\|\*/]|^is |^not |^in |^and |^or ' + pm.sort_checkers() + + assert ip.prefilter("f -1") == "f(-1)" + assert ip.prefilter("f +1") == "f(+1)" + finally: + pm.unregister_checker(ac) + finally: + ip.magic('autocall 0') + del ip.user_ns['f'] + + +def test_issue_114(): + """Check that multiline string literals don't expand as magic + see http://github.com/ipython/ipython/issues/114""" + + template = '"""\n%s\n"""' + # Store the current value of multi_line_specials and turn it off before + # running test, since it could be true (case in which the test doesn't make + # sense, as multiline string literals *will* expand as magic in that case). + msp = ip.prefilter_manager.multi_line_specials + ip.prefilter_manager.multi_line_specials = False + try: + for mgk in ip.magics_manager.lsmagic()['line']: + raw = template % mgk + assert ip.prefilter(raw) == raw + finally: + ip.prefilter_manager.multi_line_specials = msp + + +def test_prefilter_attribute_errors(): + """Capture exceptions thrown by user objects on attribute access. + + See http://github.com/ipython/ipython/issues/988.""" + + class X(object): + def __getattr__(self, k): + raise ValueError('broken object') + def __call__(self, x): + return x + + # Create a callable broken object + ip.user_ns['x'] = X() + ip.magic('autocall 2') + try: + # Even if x throws an attribute error when looking at its rewrite + # attribute, we should not crash. So the test here is simply making + # the prefilter call and not having an exception. + ip.prefilter('x 1') + finally: + del ip.user_ns['x'] + ip.magic('autocall 0') + + +def test_autocall_should_support_unicode(): + ip.magic('autocall 2') + ip.user_ns['π'] = lambda x: x + try: + assert ip.prefilter("π 3") == "π(3)" + finally: + ip.magic('autocall 0') + del ip.user_ns['π'] diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_profile.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_profile.py new file mode 100644 index 000000000..876c7fda8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_profile.py @@ -0,0 +1,155 @@ +# coding: utf-8 +"""Tests for profile-related functions. 
+ +Currently only the startup-dir functionality is tested, but more tests should +be added for: + + * ipython profile create + * ipython profile list + * ipython profile create --parallel + * security dir permissions + +Authors +------- + +* MinRK + +""" + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import shutil +import sys +import tempfile +from pathlib import Path +from unittest import TestCase + +from tempfile import TemporaryDirectory + +from IPython.core.profileapp import list_bundled_profiles, list_profiles_in +from IPython.core.profiledir import ProfileDir +from IPython.testing import decorators as dec +from IPython.testing import tools as tt +from IPython.utils.process import getoutput + +#----------------------------------------------------------------------------- +# Globals +#----------------------------------------------------------------------------- +TMP_TEST_DIR = Path(tempfile.mkdtemp()) +HOME_TEST_DIR = TMP_TEST_DIR / "home_test_dir" +IP_TEST_DIR = HOME_TEST_DIR / ".ipython" + +# +# Setup/teardown functions/decorators +# + +def setup_module(): + """Setup test environment for the module: + + - Adds dummy home dir tree + """ + # Do not mask exceptions here. In particular, catching WindowsError is a + # problem because that exception is only defined on Windows... + (Path.cwd() / IP_TEST_DIR).mkdir(parents=True) + + +def teardown_module(): + """Teardown test environment for the module: + + - Remove dummy home dir tree + """ + # Note: we remove the parent test dir, which is the root of all test + # subdirs we may have created. Use shutil instead of os.removedirs, so + # that non-empty directories are all recursively removed. + shutil.rmtree(TMP_TEST_DIR) + + +#----------------------------------------------------------------------------- +# Test functions +#----------------------------------------------------------------------------- +class ProfileStartupTest(TestCase): + def setUp(self): + # create profile dir + self.pd = ProfileDir.create_profile_dir_by_name(IP_TEST_DIR, "test") + self.options = ["--ipython-dir", IP_TEST_DIR, "--profile", "test"] + self.fname = TMP_TEST_DIR / "test.py" + + def tearDown(self): + # We must remove this profile right away so its presence doesn't + # confuse other tests. + shutil.rmtree(self.pd.location) + + def init(self, startup_file, startup, test): + # write startup python file + with open(Path(self.pd.startup_dir) / startup_file, "w", encoding="utf-8") as f: + f.write(startup) + # write simple test file, to check that the startup file was run + with open(self.fname, "w", encoding="utf-8") as f: + f.write(test) + + def validate(self, output): + tt.ipexec_validate(self.fname, output, "", options=self.options) + + def test_startup_py(self): + self.init('00-start.py', 'zzz=123\n', 'print(zzz)\n') + self.validate('123') + + def test_startup_ipy(self): + self.init('00-start.ipy', '%xmode plain\n', '') + self.validate('Exception reporting mode: Plain') + + +def test_list_profiles_in(): + # No need to remove these directories and files, as they will get nuked in + # the module-level teardown. 
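+    # The fixture mixes real profile dirs ('profile_foo', 'profile_hello'),
+    # a decoy directory and a plain file; only the real profiles should be
+    # reported, with the 'profile_' prefix stripped.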
+ td = Path(tempfile.mkdtemp(dir=TMP_TEST_DIR)) + for name in ("profile_foo", "profile_hello", "not_a_profile"): + Path(td / name).mkdir(parents=True) + if dec.unicode_paths: + Path(td / "profile_ünicode").mkdir(parents=True) + + with open(td / "profile_file", "w", encoding="utf-8") as f: + f.write("I am not a profile directory") + profiles = list_profiles_in(td) + + # unicode normalization can turn u'ünicode' into u'u\0308nicode', + # so only check for *nicode, and that creating a ProfileDir from the + # name remains valid + found_unicode = False + for p in list(profiles): + if p.endswith('nicode'): + pd = ProfileDir.find_profile_dir_by_name(td, p) + profiles.remove(p) + found_unicode = True + break + if dec.unicode_paths: + assert found_unicode is True + assert set(profiles) == {"foo", "hello"} + + +def test_list_bundled_profiles(): + # This variable will need to be updated when a new profile gets bundled + bundled = sorted(list_bundled_profiles()) + assert bundled == [] + + +def test_profile_create_ipython_dir(): + """ipython profile create respects --ipython-dir""" + with TemporaryDirectory() as td: + getoutput( + [ + sys.executable, + "-m", + "IPython", + "profile", + "create", + "foo", + "--ipython-dir=%s" % td, + ] + ) + profile_dir = Path(td) / "profile_foo" + assert Path(profile_dir).exists() + ipython_config = profile_dir / "ipython_config.py" + assert Path(ipython_config).exists() diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prompts.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prompts.py new file mode 100644 index 000000000..95e6163b2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_prompts.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 +"""Tests for prompt generation.""" + +import unittest + +from IPython.core.prompts import LazyEvaluate + +class PromptTests(unittest.TestCase): + def test_lazy_eval_unicode(self): + u = u'ünicødé' + lz = LazyEvaluate(lambda : u) + self.assertEqual(str(lz), u) + self.assertEqual(format(lz), u) + + def test_lazy_eval_nonascii_bytes(self): + u = u'ünicødé' + b = u.encode('utf8') + lz = LazyEvaluate(lambda : b) + # unicode(lz) would fail + self.assertEqual(str(lz), str(b)) + self.assertEqual(format(lz), str(b)) + + def test_lazy_eval_float(self): + f = 0.503 + lz = LazyEvaluate(lambda : f) + + self.assertEqual(str(lz), str(f)) + self.assertEqual(format(lz), str(f)) + self.assertEqual(format(lz, '.1'), '0.5') + diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_pylabtools.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_pylabtools.py new file mode 100644 index 000000000..59bf3bccd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_pylabtools.py @@ -0,0 +1,271 @@ +"""Tests for pylab tools module. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from binascii import a2b_base64 +from io import BytesIO + +import pytest + +matplotlib = pytest.importorskip("matplotlib") +matplotlib.use('Agg') +from matplotlib.figure import Figure + +from matplotlib import pyplot as plt +from matplotlib_inline import backend_inline +import numpy as np + +from IPython.core.getipython import get_ipython +from IPython.core.interactiveshell import InteractiveShell +from IPython.core.display import _PNG, _JPEG +from .. 
import pylabtools as pt + +from IPython.testing import decorators as dec + + +def test_figure_to_svg(): + # simple empty-figure test + fig = plt.figure() + assert pt.print_figure(fig, "svg") is None + + plt.close('all') + + # simple check for at least svg-looking output + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + ax.plot([1,2,3]) + plt.draw() + svg = pt.print_figure(fig, "svg")[:100].lower() + assert "doctype svg" in svg + + +def _check_pil_jpeg_bytes(): + """Skip if PIL can't write JPEGs to BytesIO objects""" + # PIL's JPEG plugin can't write to BytesIO objects + # Pillow fixes this + from PIL import Image + buf = BytesIO() + img = Image.new("RGB", (4,4)) + try: + img.save(buf, 'jpeg') + except Exception as e: + ename = e.__class__.__name__ + raise pytest.skip("PIL can't write JPEG to BytesIO: %s: %s" % (ename, e)) from e + +@dec.skip_without("PIL.Image") +def test_figure_to_jpeg(): + _check_pil_jpeg_bytes() + # simple check for at least jpeg-looking output + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + ax.plot([1,2,3]) + plt.draw() + jpeg = pt.print_figure(fig, 'jpeg', pil_kwargs={'optimize': 50})[:100].lower() + assert jpeg.startswith(_JPEG) + +def test_retina_figure(): + # simple empty-figure test + fig = plt.figure() + assert pt.retina_figure(fig) == None + plt.close('all') + + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + ax.plot([1,2,3]) + plt.draw() + png, md = pt.retina_figure(fig) + assert png.startswith(_PNG) + assert "width" in md + assert "height" in md + + +_fmt_mime_map = { + 'png': 'image/png', + 'jpeg': 'image/jpeg', + 'pdf': 'application/pdf', + 'retina': 'image/png', + 'svg': 'image/svg+xml', +} + +def test_select_figure_formats_str(): + ip = get_ipython() + for fmt, active_mime in _fmt_mime_map.items(): + pt.select_figure_formats(ip, fmt) + for mime, f in ip.display_formatter.formatters.items(): + if mime == active_mime: + assert Figure in f + else: + assert Figure not in f + +def test_select_figure_formats_kwargs(): + ip = get_ipython() + kwargs = dict(bbox_inches="tight") + pt.select_figure_formats(ip, "png", **kwargs) + formatter = ip.display_formatter.formatters["image/png"] + f = formatter.lookup_by_type(Figure) + cell = f.keywords + expected = kwargs + expected["base64"] = True + expected["fmt"] = "png" + assert cell == expected + + # check that the formatter doesn't raise + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + ax.plot([1,2,3]) + plt.draw() + formatter.enabled = True + png = formatter(fig) + assert isinstance(png, str) + png_bytes = a2b_base64(png) + assert png_bytes.startswith(_PNG) + +def test_select_figure_formats_set(): + ip = get_ipython() + for fmts in [ + {'png', 'svg'}, + ['png'], + ('jpeg', 'pdf', 'retina'), + {'svg'}, + ]: + active_mimes = {_fmt_mime_map[fmt] for fmt in fmts} + pt.select_figure_formats(ip, fmts) + for mime, f in ip.display_formatter.formatters.items(): + if mime in active_mimes: + assert Figure in f + else: + assert Figure not in f + +def test_select_figure_formats_bad(): + ip = get_ipython() + with pytest.raises(ValueError): + pt.select_figure_formats(ip, 'foo') + with pytest.raises(ValueError): + pt.select_figure_formats(ip, {'png', 'foo'}) + with pytest.raises(ValueError): + pt.select_figure_formats(ip, ['retina', 'pdf', 'bar', 'bad']) + +def test_import_pylab(): + ns = {} + pt.import_pylab(ns, import_all=False) + assert "plt" in ns + assert ns["np"] == np + + +class TestPylabSwitch(object): + class Shell(InteractiveShell): + def init_history(self): + """Sets up the command history, and starts regular 
autosaves.""" + self.config.HistoryManager.hist_file = ":memory:" + super().init_history() + + def enable_gui(self, gui): + pass + + def setup(self): + import matplotlib + def act_mpl(backend): + matplotlib.rcParams['backend'] = backend + + # Save rcParams since they get modified + self._saved_rcParams = matplotlib.rcParams + self._saved_rcParamsOrig = matplotlib.rcParamsOrig + matplotlib.rcParams = dict(backend='Qt4Agg') + matplotlib.rcParamsOrig = dict(backend='Qt4Agg') + + # Mock out functions + self._save_am = pt.activate_matplotlib + pt.activate_matplotlib = act_mpl + self._save_ip = pt.import_pylab + pt.import_pylab = lambda *a,**kw:None + self._save_cis = backend_inline.configure_inline_support + backend_inline.configure_inline_support = lambda *a, **kw: None + + def teardown(self): + pt.activate_matplotlib = self._save_am + pt.import_pylab = self._save_ip + backend_inline.configure_inline_support = self._save_cis + import matplotlib + matplotlib.rcParams = self._saved_rcParams + matplotlib.rcParamsOrig = self._saved_rcParamsOrig + + def test_qt(self): + + s = self.Shell() + gui, backend = s.enable_matplotlib(None) + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + gui, backend = s.enable_matplotlib("inline") + assert gui == "inline" + assert s.pylab_gui_select == "qt" + + gui, backend = s.enable_matplotlib("qt") + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + gui, backend = s.enable_matplotlib("inline") + assert gui == "inline" + assert s.pylab_gui_select == "qt" + + gui, backend = s.enable_matplotlib() + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + def test_inline(self): + s = self.Shell() + gui, backend = s.enable_matplotlib("inline") + assert gui == "inline" + assert s.pylab_gui_select == None + + gui, backend = s.enable_matplotlib("inline") + assert gui == "inline" + assert s.pylab_gui_select == None + + gui, backend = s.enable_matplotlib("qt") + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + def test_inline_twice(self): + "Using '%matplotlib inline' twice should not reset formatters" + + ip = self.Shell() + gui, backend = ip.enable_matplotlib("inline") + assert gui == "inline" + + fmts = {'png'} + active_mimes = {_fmt_mime_map[fmt] for fmt in fmts} + pt.select_figure_formats(ip, fmts) + + gui, backend = ip.enable_matplotlib("inline") + assert gui == "inline" + + for mime, f in ip.display_formatter.formatters.items(): + if mime in active_mimes: + assert Figure in f + else: + assert Figure not in f + + def test_qt_gtk(self): + s = self.Shell() + gui, backend = s.enable_matplotlib("qt") + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + gui, backend = s.enable_matplotlib("gtk") + assert gui == "qt" + assert s.pylab_gui_select == "qt" + + +def test_no_gui_backends(): + for k in ['agg', 'svg', 'pdf', 'ps']: + assert k not in pt.backend2gui + + +def test_figure_no_canvas(): + fig = Figure() + fig.canvas = None + pt.print_figure(fig) diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_run.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_run.py new file mode 100644 index 000000000..9687786b4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_run.py @@ -0,0 +1,626 @@ +# encoding: utf-8 +"""Tests for code execution (%run and related), which is particularly tricky. + +Because of how %run manages namespaces, and the fact that we are trying here to +verify subtle object deletion and reference counting issues, the %run tests +will be kept in this separate file. 
This makes it easier to aggregate in one
+place the tricks needed to handle it; most other magics are much easier to test
+and we do so in a common test_magic file.
+
+Note that any test using `run -i` should make sure to do a `reset` afterwards,
+as otherwise it may influence later tests.
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+
+
+import functools
+import os
+import platform
+import random
+import string
+import sys
+import textwrap
+import unittest
+from os.path import join as pjoin
+from unittest.mock import patch
+
+import pytest
+from tempfile import TemporaryDirectory
+
+from IPython.core import debugger
+from IPython.testing import decorators as dec
+from IPython.testing import tools as tt
+from IPython.utils.io import capture_output
+
+
+def doctest_refbug():
+    """Very nasty problem with references held by multiple runs of a script.
+    See: https://github.com/ipython/ipython/issues/141
+
+    In [1]: _ip.clear_main_mod_cache()
+    # random
+
+    In [2]: %run refbug
+
+    In [3]: call_f()
+    lowercased: hello
+
+    In [4]: %run refbug
+
+    In [5]: call_f()
+    lowercased: hello
+    lowercased: hello
+    """
+
+
+def doctest_run_builtins():
+    r"""Check that %run doesn't damage __builtins__.
+
+    In [1]: import tempfile
+
+    In [2]: bid1 = id(__builtins__)
+
+    In [3]: fname = tempfile.mkstemp('.py')[1]
+
+    In [3]: f = open(fname, 'w', encoding='utf-8')
+
+    In [4]: dummy = f.write('pass\n')
+
+    In [5]: f.flush()
+
+    In [6]: t1 = type(__builtins__)
+
+    In [7]: %run $fname
+
+    In [7]: f.close()
+
+    In [8]: bid2 = id(__builtins__)
+
+    In [9]: t2 = type(__builtins__)
+
+    In [10]: t1 == t2
+    Out[10]: True
+
+    In [10]: bid1 == bid2
+    Out[10]: True
+
+    In [12]: try:
+       ....:     os.unlink(fname)
+       ....: except:
+       ....:     pass
+       ....:
+    """
+
+
+def doctest_run_option_parser():
+    r"""Test option parser in %run.
+
+    In [1]: %run print_argv.py
+    []
+
+    In [2]: %run print_argv.py print*.py
+    ['print_argv.py']
+
+    In [3]: %run -G print_argv.py print*.py
+    ['print*.py']
+
+    """
+
+
+@dec.skip_win32
+def doctest_run_option_parser_for_posix():
+    r"""Test option parser in %run (Linux/OSX specific).
+
+    You need a backslash to escape a glob in POSIX systems:
+
+    In [1]: %run print_argv.py print\\*.py
+    ['print*.py']
+
+    You can't use quotes to escape a glob in POSIX systems:
+
+    In [2]: %run print_argv.py 'print*.py'
+    ['print_argv.py']
+
+    """
+
+
+doctest_run_option_parser_for_posix.__skip_doctest__ = sys.platform == "win32"
+
+
+@dec.skip_if_not_win32
+def doctest_run_option_parser_for_windows():
+    r"""Test option parser in %run (Windows specific).
+
+    On Windows, you can't escape ``*`` by backslash:
+
+    In [1]: %run print_argv.py print\\*.py
+    ['print\\\\*.py']
+
+    You can use quotes to escape a glob:
+
+    In [2]: %run print_argv.py 'print*.py'
+    ["'print*.py'"]
+
+    """
+
+
+doctest_run_option_parser_for_windows.__skip_doctest__ = sys.platform != "win32"
+
+
+def doctest_reset_del():
+    """Test that resetting doesn't cause errors in __del__ methods.
+
+    In [2]: class A(object):
+       ...:     def __del__(self):
+       ...:         print(str("Hi"))
+       ...:
+
+    In [3]: a = A()
+
+    In [4]: get_ipython().reset(); import gc; x = gc.collect(0)
+    Hi
+
+    In [5]: 1+1
+    Out[5]: 2
+    """
+
+# For some tests, it will be handy to organize them in a class with a common
+# setup that makes a temp file
+
+class TestMagicRunPass(tt.TempFileMixin):
+
+    def setUp(self):
+        content = "a = [1,2,3]\nb = 1"
+        self.mktmp(content)
+
+    def run_tmpfile(self):
+        _ip = get_ipython()
+        # This fails on Windows if self.tmpfile.name has spaces or "~" in it.
+        # See below and ticket https://bugs.launchpad.net/bugs/366353
+        _ip.run_line_magic("run", self.fname)
+
+    def run_tmpfile_p(self):
+        _ip = get_ipython()
+        # This fails on Windows if self.tmpfile.name has spaces or "~" in it.
+        # See below and ticket https://bugs.launchpad.net/bugs/366353
+        _ip.run_line_magic("run", "-p %s" % self.fname)
+
+    def test_builtins_id(self):
+        """Check that %run doesn't damage __builtins__"""
+        _ip = get_ipython()
+        # Test that the id of __builtins__ is not modified by %run
+        bid1 = id(_ip.user_ns['__builtins__'])
+        self.run_tmpfile()
+        bid2 = id(_ip.user_ns['__builtins__'])
+        assert bid1 == bid2
+
+    def test_builtins_type(self):
+        """Check that the type of __builtins__ doesn't change with %run.
+
+        However, the above could pass if __builtins__ was already modified to
+        be a dict (it should be a module) by a previous use of %run. So we
+        also check explicitly that it really is a module:
+        """
+        _ip = get_ipython()
+        self.run_tmpfile()
+        assert type(_ip.user_ns["__builtins__"]) == type(sys)
+
+    def test_run_profile(self):
+        """Test that the option -p, which invokes the profiler, does not
+        crash by invoking execfile"""
+        self.run_tmpfile_p()
+
+    def test_run_debug_twice(self):
+        # https://github.com/ipython/ipython/issues/10028
+        _ip = get_ipython()
+        with tt.fake_input(["c"]):
+            _ip.run_line_magic("run", "-d %s" % self.fname)
+        with tt.fake_input(["c"]):
+            _ip.run_line_magic("run", "-d %s" % self.fname)
+
+    def test_run_debug_twice_with_breakpoint(self):
+        """Check that a second `%run -d` after setting a breakpoint does not
+        trip over stale breakpoint state (no KeyError)."""
+        _ip = get_ipython()
+        with tt.fake_input(["b 2", "c", "c"]):
+            _ip.run_line_magic("run", "-d %s" % self.fname)
+
+        with tt.fake_input(["c"]):
+            with tt.AssertNotPrints("KeyError"):
+                _ip.run_line_magic("run", "-d %s" % self.fname)
+
+
+class TestMagicRunSimple(tt.TempFileMixin):
+
+    def test_simpledef(self):
+        """Test that simple class definitions work."""
+        src = ("class foo: pass\n"
+               "def f(): return foo()")
+        self.mktmp(src)
+        _ip.run_line_magic("run", str(self.fname))
+        _ip.run_cell("t = isinstance(f(), foo)")
+        assert _ip.user_ns["t"] is True
+
+    @pytest.mark.xfail(
+        platform.python_implementation() == "PyPy",
+        reason="expecting __del__ call on exit is unreliable and doesn't happen on PyPy",
+    )
+    def test_obj_del(self):
+        """Test that object's __del__ methods are called on exit."""
+        src = ("class A(object):\n"
+               "    def __del__(self):\n"
+               "        print('object A deleted')\n"
+               "a = A()\n")
+        self.mktmp(src)
+        err = None
+        tt.ipexec_validate(self.fname, 'object A deleted', err)
+
+    def test_aggressive_namespace_cleanup(self):
+        """Test that namespace cleanup is not too aggressive GH-238
+
+        Returning from another run magic deletes the namespace"""
+        # see ticket https://github.com/ipython/ipython/issues/238
+
+        with tt.TempFileMixin() as empty:
+            empty.mktmp("")
+            # On Windows, the filename will have \users in it, so we need to use the
+            # repr so that the \u becomes \\u.
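+            # The generated script below %runs the empty file five times; if
+            # the run magic wiped the calling namespace, `ip` would disappear,
+            # the NameError branch would trigger, and i would stop short of 4.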
+ src = ( + "ip = get_ipython()\n" + "for i in range(5):\n" + " try:\n" + " ip.magic(%r)\n" + " except NameError as e:\n" + " print(i)\n" + " break\n" % ("run " + empty.fname) + ) + self.mktmp(src) + _ip.run_line_magic("run", str(self.fname)) + _ip.run_cell("ip == get_ipython()") + assert _ip.user_ns["i"] == 4 + + def test_run_second(self): + """Test that running a second file doesn't clobber the first, gh-3547""" + self.mktmp("avar = 1\n" "def afunc():\n" " return avar\n") + + with tt.TempFileMixin() as empty: + empty.mktmp("") + + _ip.run_line_magic("run", self.fname) + _ip.run_line_magic("run", empty.fname) + assert _ip.user_ns["afunc"]() == 1 + + def test_tclass(self): + mydir = os.path.dirname(__file__) + tc = os.path.join(mydir, "tclass") + src = f"""\ +import gc +%run "{tc}" C-first +gc.collect(0) +%run "{tc}" C-second +gc.collect(0) +%run "{tc}" C-third +gc.collect(0) +%reset -f +""" + self.mktmp(src, ".ipy") + out = """\ +ARGV 1-: ['C-first'] +ARGV 1-: ['C-second'] +tclass.py: deleting object: C-first +ARGV 1-: ['C-third'] +tclass.py: deleting object: C-second +tclass.py: deleting object: C-third +""" + err = None + tt.ipexec_validate(self.fname, out, err) + + def test_run_i_after_reset(self): + """Check that %run -i still works after %reset (gh-693)""" + src = "yy = zz\n" + self.mktmp(src) + _ip.run_cell("zz = 23") + try: + _ip.run_line_magic("run", "-i %s" % self.fname) + assert _ip.user_ns["yy"] == 23 + finally: + _ip.run_line_magic("reset", "-f") + + _ip.run_cell("zz = 23") + try: + _ip.run_line_magic("run", "-i %s" % self.fname) + assert _ip.user_ns["yy"] == 23 + finally: + _ip.run_line_magic("reset", "-f") + + def test_unicode(self): + """Check that files in odd encodings are accepted.""" + mydir = os.path.dirname(__file__) + na = os.path.join(mydir, "nonascii.py") + _ip.magic('run "%s"' % na) + assert _ip.user_ns["u"] == "Ўт№Ф" + + def test_run_py_file_attribute(self): + """Test handling of `__file__` attribute in `%run .py`.""" + src = "t = __file__\n" + self.mktmp(src) + _missing = object() + file1 = _ip.user_ns.get("__file__", _missing) + _ip.run_line_magic("run", self.fname) + file2 = _ip.user_ns.get("__file__", _missing) + + # Check that __file__ was equal to the filename in the script's + # namespace. + assert _ip.user_ns["t"] == self.fname + + # Check that __file__ was not leaked back into user_ns. + assert file1 == file2 + + def test_run_ipy_file_attribute(self): + """Test handling of `__file__` attribute in `%run `.""" + src = "t = __file__\n" + self.mktmp(src, ext='.ipy') + _missing = object() + file1 = _ip.user_ns.get("__file__", _missing) + _ip.run_line_magic("run", self.fname) + file2 = _ip.user_ns.get("__file__", _missing) + + # Check that __file__ was equal to the filename in the script's + # namespace. + assert _ip.user_ns["t"] == self.fname + + # Check that __file__ was not leaked back into user_ns. 
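+        # (both sides are the `_missing` sentinel when __file__ was never
+        # present in the interactive namespace)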
+ assert file1 == file2 + + def test_run_formatting(self): + """ Test that %run -t -N does not raise a TypeError for N > 1.""" + src = "pass" + self.mktmp(src) + _ip.run_line_magic("run", "-t -N 1 %s" % self.fname) + _ip.run_line_magic("run", "-t -N 10 %s" % self.fname) + + def test_ignore_sys_exit(self): + """Test the -e option to ignore sys.exit()""" + src = "import sys; sys.exit(1)" + self.mktmp(src) + with tt.AssertPrints("SystemExit"): + _ip.run_line_magic("run", self.fname) + + with tt.AssertNotPrints("SystemExit"): + _ip.run_line_magic("run", "-e %s" % self.fname) + + def test_run_nb(self): + """Test %run notebook.ipynb""" + pytest.importorskip("nbformat") + from nbformat import v4, writes + nb = v4.new_notebook( + cells=[ + v4.new_markdown_cell("The Ultimate Question of Everything"), + v4.new_code_cell("answer=42") + ] + ) + src = writes(nb, version=4) + self.mktmp(src, ext='.ipynb') + + _ip.run_line_magic("run", self.fname) + + assert _ip.user_ns["answer"] == 42 + + def test_run_nb_error(self): + """Test %run notebook.ipynb error""" + pytest.importorskip("nbformat") + from nbformat import v4, writes + + # %run when a file name isn't provided + pytest.raises(Exception, _ip.magic, "run") + + # %run when a file doesn't exist + pytest.raises(Exception, _ip.magic, "run foobar.ipynb") + + # %run on a notebook with an error + nb = v4.new_notebook( + cells=[ + v4.new_code_cell("0/0") + ] + ) + src = writes(nb, version=4) + self.mktmp(src, ext='.ipynb') + pytest.raises(Exception, _ip.magic, "run %s" % self.fname) + + def test_file_options(self): + src = ('import sys\n' + 'a = " ".join(sys.argv[1:])\n') + self.mktmp(src) + test_opts = "-x 3 --verbose" + _ip.run_line_magic("run", "{0} {1}".format(self.fname, test_opts)) + assert _ip.user_ns["a"] == test_opts + + +class TestMagicRunWithPackage(unittest.TestCase): + + def writefile(self, name, content): + path = os.path.join(self.tempdir.name, name) + d = os.path.dirname(path) + if not os.path.isdir(d): + os.makedirs(d) + with open(path, "w", encoding="utf-8") as f: + f.write(textwrap.dedent(content)) + + def setUp(self): + self.package = package = 'tmp{0}'.format(''.join([random.choice(string.ascii_letters) for i in range(10)])) + """Temporary (probably) valid python package name.""" + + self.value = int(random.random() * 10000) + + self.tempdir = TemporaryDirectory() + self.__orig_cwd = os.getcwd() + sys.path.insert(0, self.tempdir.name) + + self.writefile(os.path.join(package, '__init__.py'), '') + self.writefile(os.path.join(package, 'sub.py'), """ + x = {0!r} + """.format(self.value)) + self.writefile(os.path.join(package, 'relative.py'), """ + from .sub import x + """) + self.writefile(os.path.join(package, 'absolute.py'), """ + from {0}.sub import x + """.format(package)) + self.writefile(os.path.join(package, 'args.py'), """ + import sys + a = " ".join(sys.argv[1:]) + """.format(package)) + + def tearDown(self): + os.chdir(self.__orig_cwd) + sys.path[:] = [p for p in sys.path if p != self.tempdir.name] + self.tempdir.cleanup() + + def check_run_submodule(self, submodule, opts=""): + _ip.user_ns.pop("x", None) + _ip.run_line_magic( + "run", "{2} -m {0}.{1}".format(self.package, submodule, opts) + ) + self.assertEqual( + _ip.user_ns["x"], + self.value, + "Variable `x` is not loaded from module `{0}`.".format(submodule), + ) + + def test_run_submodule_with_absolute_import(self): + self.check_run_submodule('absolute') + + def test_run_submodule_with_relative_import(self): + """Run submodule that has a relative import statement (#2727).""" 
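+        # `%run -m package.relative` must execute the submodule with its
+        # package context set, otherwise the `from .sub import x` inside
+        # relative.py would raise an ImportError.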
+        self.check_run_submodule('relative')
+
+    def test_prun_submodule_with_absolute_import(self):
+        self.check_run_submodule('absolute', '-p')
+
+    def test_prun_submodule_with_relative_import(self):
+        self.check_run_submodule('relative', '-p')
+
+    def with_fake_debugger(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwds):
+            with patch.object(debugger.Pdb, 'run', staticmethod(eval)):
+                return func(*args, **kwds)
+        return wrapper
+
+    @with_fake_debugger
+    def test_debug_run_submodule_with_absolute_import(self):
+        self.check_run_submodule('absolute', '-d')
+
+    @with_fake_debugger
+    def test_debug_run_submodule_with_relative_import(self):
+        self.check_run_submodule('relative', '-d')
+
+    def test_module_options(self):
+        _ip.user_ns.pop("a", None)
+        test_opts = "-x abc -m test"
+        _ip.run_line_magic("run", "-m {0}.args {1}".format(self.package, test_opts))
+        assert _ip.user_ns["a"] == test_opts
+
+    def test_module_options_with_separator(self):
+        _ip.user_ns.pop("a", None)
+        test_opts = "-x abc -m test"
+        _ip.run_line_magic("run", "-m {0}.args -- {1}".format(self.package, test_opts))
+        assert _ip.user_ns["a"] == test_opts
+
+
+def test_run__name__():
+    with TemporaryDirectory() as td:
+        path = pjoin(td, "foo.py")
+        with open(path, "w", encoding="utf-8") as f:
+            f.write("q = __name__")
+
+        _ip.user_ns.pop("q", None)
+        _ip.run_line_magic("run", "{}".format(path))
+        assert _ip.user_ns.pop("q") == "__main__"
+
+        _ip.run_line_magic("run", "-n {}".format(path))
+        assert _ip.user_ns.pop("q") == "foo"
+
+        try:
+            _ip.run_line_magic("run", "-i -n {}".format(path))
+            assert _ip.user_ns.pop("q") == "foo"
+        finally:
+            _ip.run_line_magic("reset", "-f")
+
+
+def test_run_tb():
+    """Test traceback offset in %run"""
+    with TemporaryDirectory() as td:
+        path = pjoin(td, "foo.py")
+        with open(path, "w", encoding="utf-8") as f:
+            f.write(
+                "\n".join(
+                    [
+                        "def foo():",
+                        "    return bar()",
+                        "def bar():",
+                        "    raise RuntimeError('hello!')",
+                        "foo()",
+                    ]
+                )
+            )
+        with capture_output() as io:
+            _ip.run_line_magic("run", "{}".format(path))
+        out = io.stdout
+        assert "execfile" not in out
+        assert "RuntimeError" in out
+        assert out.count("---->") == 3
+        del ip.user_ns['bar']
+        del ip.user_ns['foo']
+
+
+def test_multiprocessing_run():
+    """Check that we can %run a script that uses multiprocessing without
+    messing up the main namespace.
+
+    Note that importing `nose.tools as nt` modifies the value of
+    sys.modules['__mp_main__'], so we need to temporarily set it to None to
+    test the issue.
+ """ + with TemporaryDirectory() as td: + mpm = sys.modules.get('__mp_main__') + sys.modules['__mp_main__'] = None + try: + path = pjoin(td, "test.py") + with open(path, "w", encoding="utf-8") as f: + f.write("import multiprocessing\nprint('hoy')") + with capture_output() as io: + _ip.run_line_magic('run', path) + _ip.run_cell("i_m_undefined") + out = io.stdout + assert "hoy" in out + assert "AttributeError" not in out + assert "NameError" in out + assert out.count("---->") == 1 + except: + raise + finally: + sys.modules['__mp_main__'] = mpm + + +def test_script_tb(): + """Test traceback offset in `ipython script.py`""" + with TemporaryDirectory() as td: + path = pjoin(td, "foo.py") + with open(path, "w", encoding="utf-8") as f: + f.write( + "\n".join( + [ + "def foo():", + " return bar()", + "def bar():", + " raise RuntimeError('hello!')", + "foo()", + ] + ) + ) + out, err = tt.ipexec(path) + assert "execfile" not in out + assert "RuntimeError" in out + assert out.count("---->") == 3 diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_shellapp.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_shellapp.py new file mode 100644 index 000000000..9f4f87b81 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_shellapp.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +"""Tests for shellapp module. + +Authors +------- +* Bradley Froehle +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2012 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import unittest + +from IPython.testing import decorators as dec +from IPython.testing import tools as tt + + +class TestFileToRun(tt.TempFileMixin, unittest.TestCase): + """Test the behavior of the file_to_run parameter.""" + + def test_py_script_file_attribute(self): + """Test that `__file__` is set when running `ipython file.py`""" + src = "print(__file__)\n" + self.mktmp(src) + + err = None + tt.ipexec_validate(self.fname, self.fname, err) + + def test_ipy_script_file_attribute(self): + """Test that `__file__` is set when running `ipython file.ipy`""" + src = "print(__file__)\n" + self.mktmp(src, ext='.ipy') + + err = None + tt.ipexec_validate(self.fname, self.fname, err) + + # The commands option to ipexec_validate doesn't work on Windows, and it + # doesn't seem worth fixing + @dec.skip_win32 + def test_py_script_file_attribute_interactively(self): + """Test that `__file__` is not set after `ipython -i file.py`""" + src = "True\n" + self.mktmp(src) + + out, err = tt.ipexec( + self.fname, + options=["-i"], + commands=['"__file__" in globals()', "print(123)", "exit()"], + ) + assert "False" in out, f"Subprocess stderr:\n{err}\n-----" diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_splitinput.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_splitinput.py new file mode 100644 index 000000000..1462e7fa0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_splitinput.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +from IPython.core.splitinput import split_user_input, LineInfo +from IPython.testing import tools as tt + +tests = [ + 
("x=1", ("", "", "x", "=1")), + ("?", ("", "?", "", "")), + ("??", ("", "??", "", "")), + (" ?", (" ", "?", "", "")), + (" ??", (" ", "??", "", "")), + ("??x", ("", "??", "x", "")), + ("?x=1", ("", "?", "x", "=1")), + ("!ls", ("", "!", "ls", "")), + (" !ls", (" ", "!", "ls", "")), + ("!!ls", ("", "!!", "ls", "")), + (" !!ls", (" ", "!!", "ls", "")), + (",ls", ("", ",", "ls", "")), + (";ls", ("", ";", "ls", "")), + (" ;ls", (" ", ";", "ls", "")), + ("f.g(x)", ("", "", "f.g", "(x)")), + ("f.g (x)", ("", "", "f.g", "(x)")), + ("?%hist1", ("", "?", "%hist1", "")), + ("?%%hist2", ("", "?", "%%hist2", "")), + ("??%hist3", ("", "??", "%hist3", "")), + ("??%%hist4", ("", "??", "%%hist4", "")), + ("?x*", ("", "?", "x*", "")), +] +tests.append(("Pérez Fernando", ("", "", "Pérez", "Fernando"))) + + +def test_split_user_input(): + return tt.check_pairs(split_user_input, tests) + + +def test_LineInfo(): + """Simple test for LineInfo construction and str()""" + linfo = LineInfo(" %cd /home") + assert str(linfo) == "LineInfo [ |%|cd|/home]" diff --git a/myenv/lib/python3.10/site-packages/IPython/core/tests/test_ultratb.py b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_ultratb.py new file mode 100644 index 000000000..349d2ac9e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/tests/test_ultratb.py @@ -0,0 +1,408 @@ +# encoding: utf-8 +"""Tests for IPython.core.ultratb +""" +import io +import os.path +import platform +import re +import sys +import traceback +import unittest +from textwrap import dedent + +from tempfile import TemporaryDirectory + +from IPython.core.ultratb import ColorTB, VerboseTB +from IPython.testing import tools as tt +from IPython.testing.decorators import onlyif_unicode_paths +from IPython.utils.syspathcontext import prepended_to_syspath + +file_1 = """1 +2 +3 +def f(): + 1/0 +""" + +file_2 = """def f(): + 1/0 +""" + + +def recursionlimit(frames): + """ + decorator to set the recursion limit temporarily + """ + + def inner(test_function): + def wrapper(*args, **kwargs): + rl = sys.getrecursionlimit() + sys.setrecursionlimit(frames) + try: + return test_function(*args, **kwargs) + finally: + sys.setrecursionlimit(rl) + + return wrapper + + return inner + + +class ChangedPyFileTest(unittest.TestCase): + def test_changing_py_file(self): + """Traceback produced if the line where the error occurred is missing? + + https://github.com/ipython/ipython/issues/1456 + """ + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, "w", encoding="utf-8") as f: + f.write(file_1) + + with prepended_to_syspath(td): + ip.run_cell("import foo") + + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + + # Make the file shorter, so the line of the error is missing. + with open(fname, "w", encoding="utf-8") as f: + f.write(file_2) + + # For some reason, this was failing on the *second* call after + # changing the file, so we call f() twice. + with tt.AssertNotPrints("Internal Python error", channel='stderr'): + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + +iso_8859_5_file = u'''# coding: iso-8859-5 + +def fail(): + """дбИЖ""" + 1/0 # дбИЖ +''' + +class NonAsciiTest(unittest.TestCase): + @onlyif_unicode_paths + def test_nonascii_path(self): + # Non-ascii directory name as well. 
+ with TemporaryDirectory(suffix=u'é') as td: + fname = os.path.join(td, u"fooé.py") + with open(fname, "w", encoding="utf-8") as f: + f.write(file_1) + + with prepended_to_syspath(td): + ip.run_cell("import foo") + + with tt.AssertPrints("ZeroDivisionError"): + ip.run_cell("foo.f()") + + def test_iso8859_5(self): + with TemporaryDirectory() as td: + fname = os.path.join(td, 'dfghjkl.py') + + with io.open(fname, 'w', encoding='iso-8859-5') as f: + f.write(iso_8859_5_file) + + with prepended_to_syspath(td): + ip.run_cell("from dfghjkl import fail") + + with tt.AssertPrints("ZeroDivisionError"): + with tt.AssertPrints(u'дбИЖ', suppress=False): + ip.run_cell('fail()') + + def test_nonascii_msg(self): + cell = u"raise Exception('é')" + expected = u"Exception('é')" + ip.run_cell("%xmode plain") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + ip.run_cell("%xmode verbose") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + ip.run_cell("%xmode context") + with tt.AssertPrints(expected): + ip.run_cell(cell) + + ip.run_cell("%xmode minimal") + with tt.AssertPrints(u"Exception: é"): + ip.run_cell(cell) + + # Put this back into Context mode for later tests. + ip.run_cell("%xmode context") + +class NestedGenExprTestCase(unittest.TestCase): + """ + Regression test for the following issues: + https://github.com/ipython/ipython/issues/8293 + https://github.com/ipython/ipython/issues/8205 + """ + def test_nested_genexpr(self): + code = dedent( + """\ + class SpecificException(Exception): + pass + + def foo(x): + raise SpecificException("Success!") + + sum(sum(foo(x) for _ in [0]) for x in [0]) + """ + ) + with tt.AssertPrints('SpecificException: Success!', suppress=False): + ip.run_cell(code) + + +indentationerror_file = """if True: +zoon() +""" + +class IndentationErrorTest(unittest.TestCase): + def test_indentationerror_shows_line(self): + # See issue gh-2398 + with tt.AssertPrints("IndentationError"): + with tt.AssertPrints("zoon()", suppress=False): + ip.run_cell(indentationerror_file) + + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, "w", encoding="utf-8") as f: + f.write(indentationerror_file) + + with tt.AssertPrints("IndentationError"): + with tt.AssertPrints("zoon()", suppress=False): + ip.magic('run %s' % fname) + +se_file_1 = """1 +2 +7/ +""" + +se_file_2 = """7/ +""" + +class SyntaxErrorTest(unittest.TestCase): + + def test_syntaxerror_no_stacktrace_at_compile_time(self): + syntax_error_at_compile_time = """ +def foo(): + .. 
+""" + with tt.AssertPrints("SyntaxError"): + ip.run_cell(syntax_error_at_compile_time) + + with tt.AssertNotPrints("foo()"): + ip.run_cell(syntax_error_at_compile_time) + + def test_syntaxerror_stacktrace_when_running_compiled_code(self): + syntax_error_at_runtime = """ +def foo(): + eval("..") + +def bar(): + foo() + +bar() +""" + with tt.AssertPrints("SyntaxError"): + ip.run_cell(syntax_error_at_runtime) + # Assert syntax error during runtime generate stacktrace + with tt.AssertPrints(["foo()", "bar()"]): + ip.run_cell(syntax_error_at_runtime) + del ip.user_ns['bar'] + del ip.user_ns['foo'] + + def test_changing_py_file(self): + with TemporaryDirectory() as td: + fname = os.path.join(td, "foo.py") + with open(fname, "w", encoding="utf-8") as f: + f.write(se_file_1) + + with tt.AssertPrints(["7/", "SyntaxError"]): + ip.magic("run " + fname) + + # Modify the file + with open(fname, "w", encoding="utf-8") as f: + f.write(se_file_2) + + # The SyntaxError should point to the correct line + with tt.AssertPrints(["7/", "SyntaxError"]): + ip.magic("run " + fname) + + def test_non_syntaxerror(self): + # SyntaxTB may be called with an error other than a SyntaxError + # See e.g. gh-4361 + try: + raise ValueError('QWERTY') + except ValueError: + with tt.AssertPrints('QWERTY'): + ip.showsyntaxerror() + +import sys + +if sys.version_info < (3, 9) and platform.python_implementation() != "PyPy": + """ + New 3.9 Pgen Parser does not raise Memory error, except on failed malloc. + """ + class MemoryErrorTest(unittest.TestCase): + def test_memoryerror(self): + memoryerror_code = "(" * 200 + ")" * 200 + with tt.AssertPrints("MemoryError"): + ip.run_cell(memoryerror_code) + + +class Python3ChainedExceptionsTest(unittest.TestCase): + DIRECT_CAUSE_ERROR_CODE = """ +try: + x = 1 + 2 + print(not_defined_here) +except Exception as e: + x += 55 + x - 1 + y = {} + raise KeyError('uh') from e + """ + + EXCEPTION_DURING_HANDLING_CODE = """ +try: + x = 1 + 2 + print(not_defined_here) +except Exception as e: + x += 55 + x - 1 + y = {} + raise KeyError('uh') + """ + + SUPPRESS_CHAINING_CODE = """ +try: + 1/0 +except Exception: + raise ValueError("Yikes") from None + """ + + def test_direct_cause_error(self): + with tt.AssertPrints(["KeyError", "NameError", "direct cause"]): + ip.run_cell(self.DIRECT_CAUSE_ERROR_CODE) + + def test_exception_during_handling_error(self): + with tt.AssertPrints(["KeyError", "NameError", "During handling"]): + ip.run_cell(self.EXCEPTION_DURING_HANDLING_CODE) + + def test_suppress_exception_chaining(self): + with tt.AssertNotPrints("ZeroDivisionError"), \ + tt.AssertPrints("ValueError", suppress=False): + ip.run_cell(self.SUPPRESS_CHAINING_CODE) + + def test_plain_direct_cause_error(self): + with tt.AssertPrints(["KeyError", "NameError", "direct cause"]): + ip.run_cell("%xmode Plain") + ip.run_cell(self.DIRECT_CAUSE_ERROR_CODE) + ip.run_cell("%xmode Verbose") + + def test_plain_exception_during_handling_error(self): + with tt.AssertPrints(["KeyError", "NameError", "During handling"]): + ip.run_cell("%xmode Plain") + ip.run_cell(self.EXCEPTION_DURING_HANDLING_CODE) + ip.run_cell("%xmode Verbose") + + def test_plain_suppress_exception_chaining(self): + with tt.AssertNotPrints("ZeroDivisionError"), \ + tt.AssertPrints("ValueError", suppress=False): + ip.run_cell("%xmode Plain") + ip.run_cell(self.SUPPRESS_CHAINING_CODE) + ip.run_cell("%xmode Verbose") + + +class RecursionTest(unittest.TestCase): + DEFINITIONS = """ +def non_recurs(): + 1/0 + +def r1(): + r1() + +def r3a(): + r3b() + +def r3b(): 
+ r3c() + +def r3c(): + r3a() + +def r3o1(): + r3a() + +def r3o2(): + r3o1() +""" + def setUp(self): + ip.run_cell(self.DEFINITIONS) + + def test_no_recursion(self): + with tt.AssertNotPrints("skipping similar frames"): + ip.run_cell("non_recurs()") + + @recursionlimit(200) + def test_recursion_one_frame(self): + with tt.AssertPrints(re.compile( + r"\[\.\.\. skipping similar frames: r1 at line 5 \(\d{2,3} times\)\]") + ): + ip.run_cell("r1()") + + @recursionlimit(160) + def test_recursion_three_frames(self): + with tt.AssertPrints("[... skipping similar frames: "), \ + tt.AssertPrints(re.compile(r"r3a at line 8 \(\d{2} times\)"), suppress=False), \ + tt.AssertPrints(re.compile(r"r3b at line 11 \(\d{2} times\)"), suppress=False), \ + tt.AssertPrints(re.compile(r"r3c at line 14 \(\d{2} times\)"), suppress=False): + ip.run_cell("r3o2()") + + +#---------------------------------------------------------------------------- + +# module testing (minimal) +def test_handlers(): + def spam(c, d_e): + (d, e) = d_e + x = c + d + y = c * d + foo(x, y) + + def foo(a, b, bar=1): + eggs(a, b + bar) + + def eggs(f, g, z=globals()): + h = f + g + i = f - g + return h / i + + buff = io.StringIO() + + buff.write('') + buff.write('*** Before ***') + try: + buff.write(spam(1, (2, 3))) + except: + traceback.print_exc(file=buff) + + handler = ColorTB(ostream=buff) + buff.write('*** ColorTB ***') + try: + buff.write(spam(1, (2, 3))) + except: + handler(*sys.exc_info()) + buff.write('') + + handler = VerboseTB(ostream=buff) + buff.write('*** VerboseTB ***') + try: + buff.write(spam(1, (2, 3))) + except: + handler(*sys.exc_info()) + buff.write('') diff --git a/myenv/lib/python3.10/site-packages/IPython/core/ultratb.py b/myenv/lib/python3.10/site-packages/IPython/core/ultratb.py new file mode 100644 index 000000000..18eff2708 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/ultratb.py @@ -0,0 +1,1207 @@ +# -*- coding: utf-8 -*- +""" +Verbose and colourful traceback formatting. + +**ColorTB** + +I've always found it a bit hard to visually parse tracebacks in Python. The +ColorTB class is a solution to that problem. It colors the different parts of a +traceback in a manner similar to what you would expect from a syntax-highlighting +text editor. + +Installation instructions for ColorTB:: + + import sys,ultratb + sys.excepthook = ultratb.ColorTB() + +**VerboseTB** + +I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds +of useful info when a traceback occurs. Ping originally had it spit out HTML +and intended it for CGI programmers, but why should they have all the fun? I +altered it to spit out colored text to the terminal. It's a bit overwhelming, +but kind of neat, and maybe useful for long-running programs that you believe +are bug-free. If a crash *does* occur in that type of program you want details. +Give it a shot--you'll love it or you'll hate it. + +.. note:: + + The Verbose mode prints the variables currently visible where the exception + happened (shortening their strings if too long). This can potentially be + very slow, if you happen to have a huge data structure whose string + representation is complex to compute. Your computer may appear to freeze for + a while with cpu usage at 100%. If this occurs, you can cancel the traceback + with Ctrl-C (maybe hitting it more than once). 
+
+    If you encounter this kind of situation often, you may want to use the
+    Verbose_novars mode instead of the regular Verbose, which avoids formatting
+    variables (but otherwise includes the information and context given by
+    Verbose).
+
+.. note::
+
+    The verbose mode prints all variables in the stack, which means it can
+    potentially leak sensitive information like access keys or unencrypted
+    passwords.
+
+Installation instructions for VerboseTB::
+
+    import sys,ultratb
+    sys.excepthook = ultratb.VerboseTB()
+
+Note: Much of the code in this module was lifted verbatim from the standard
+library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
+
+Color schemes
+-------------
+
+The colors are defined in the class TBTools through the use of the
+ColorSchemeTable class. Currently the following exist:
+
+  - NoColor: allows all of this module to be used in any terminal (the color
+    escapes are just dummy blank strings).
+
+  - Linux: is meant to look good in a terminal like the Linux console (black
+    or very dark background).
+
+  - LightBG: similar to Linux but swaps dark/light colors to be more readable
+    in light background terminals.
+
+  - Neutral: a neutral color scheme that should be readable on both light and
+    dark backgrounds.
+
+You can implement other color schemes easily; the syntax is fairly
+self-explanatory. Please send back new schemes you develop to the author for
+possible inclusion in future releases.
+
+Inheritance diagram:
+
+.. inheritance-diagram:: IPython.core.ultratb
+   :parts: 3
+"""
+
+#*****************************************************************************
+# Copyright (C) 2001 Nathaniel Gray
+# Copyright (C) 2001-2004 Fernando Perez
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#*****************************************************************************
+
+
+import inspect
+import linecache
+import pydoc
+import sys
+import time
+import traceback
+from types import TracebackType
+from typing import Tuple, List, Any, Optional
+
+import stack_data
+from pygments.formatters.terminal256 import Terminal256Formatter
+from pygments.styles import get_style_by_name
+
+# IPython's own modules
+from IPython import get_ipython
+from IPython.core import debugger
+from IPython.core.display_trap import DisplayTrap
+from IPython.core.excolors import exception_colors
+from IPython.utils import path as util_path
+from IPython.utils import py3compat
+from IPython.utils.terminal import get_terminal_size
+
+import IPython.utils.colorable as colorable
+
+# Globals
+# amount of space to put line numbers before verbose tracebacks
+INDENT_SIZE = 8
+
+# Default color scheme. This is used, for example, by the traceback
+# formatter. When running in an actual IPython instance, the user's rc.colors
+# value is used, but having a module global makes this functionality available
+# to users of ultratb who are NOT running inside ipython.
+DEFAULT_SCHEME = 'NoColor'
+
+# ---------------------------------------------------------------------------
+# Code begins
+
+# Helper function -- largely belongs to VerboseTB, but we need the same
+# functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
+# can be recognized properly by ipython.el's py-traceback-line-re
+# (SyntaxErrors have to be treated specially because they have no traceback)
+
+
+def _format_traceback_lines(lines, Colors, has_colors: bool, lvals):
+    """
+    Format traceback lines with a pointing arrow and leading line numbers.
+ + Parameters + ---------- + lines : list[Line] + Colors + ColorScheme used. + lvals : str + Values of local variables, already colored, to inject just after the error line. + """ + numbers_width = INDENT_SIZE - 1 + res = [] + + for stack_line in lines: + if stack_line is stack_data.LINE_GAP: + res.append('%s (...)%s\n' % (Colors.linenoEm, Colors.Normal)) + continue + + line = stack_line.render(pygmented=has_colors).rstrip('\n') + '\n' + lineno = stack_line.lineno + if stack_line.is_current: + # This is the line with the error + pad = numbers_width - len(str(lineno)) + num = '%s%s' % (debugger.make_arrow(pad), str(lineno)) + start_color = Colors.linenoEm + else: + num = '%*s' % (numbers_width, lineno) + start_color = Colors.lineno + + line = '%s%s%s %s' % (start_color, num, Colors.Normal, line) + + res.append(line) + if lvals and stack_line.is_current: + res.append(lvals + '\n') + return res + + +def _format_filename(file, ColorFilename, ColorNormal, *, lineno=None): + """ + Format filename lines with custom formatting from caching compiler or `File *.py` by default + + Parameters + ---------- + file : str + ColorFilename + ColorScheme's filename coloring to be used. + ColorNormal + ColorScheme's normal coloring to be used. + """ + ipinst = get_ipython() + if ( + ipinst is not None + and (data := ipinst.compile.format_code_name(file)) is not None + ): + label, name = data + if lineno is None: + tpl_link = f"{{label}} {ColorFilename}{{name}}{ColorNormal}" + else: + tpl_link = ( + f"{{label}} {ColorFilename}{{name}}, line {{lineno}}{ColorNormal}" + ) + else: + label = "File" + name = util_path.compress_user( + py3compat.cast_unicode(file, util_path.fs_encoding) + ) + if lineno is None: + tpl_link = f"{{label}} {ColorFilename}{{name}}{ColorNormal}" + else: + # can we make this the more friendly ", line {{lineno}}", or do we need to preserve the formatting with the colon? + tpl_link = f"{{label}} {ColorFilename}{{name}}:{{lineno}}{ColorNormal}" + + return tpl_link.format(label=label, name=name, lineno=lineno) + +#--------------------------------------------------------------------------- +# Module classes +class TBTools(colorable.Colorable): + """Basic tools used by all traceback printer classes.""" + + # Number of frames to skip when reporting tracebacks + tb_offset = 0 + + def __init__( + self, + color_scheme="NoColor", + call_pdb=False, + ostream=None, + parent=None, + config=None, + *, + debugger_cls=None, + ): + # Whether to call the interactive pdb debugger after printing + # tracebacks or not + super(TBTools, self).__init__(parent=parent, config=config) + self.call_pdb = call_pdb + + # Output stream to write to. Note that we store the original value in + # a private attribute and then make the public ostream a property, so + # that we can delay accessing sys.stdout until runtime. The way + # things are written now, the sys.stdout object is dynamically managed + # so a reference to it should NEVER be stored statically. This + # property approach confines this detail to a single location, and all + # subclasses can simply access self.ostream for writing. + self._ostream = ostream + + # Create color table + self.color_scheme_table = exception_colors() + + self.set_colors(color_scheme) + self.old_scheme = color_scheme # save initial value for toggles + self.debugger_cls = debugger_cls or debugger.Pdb + + if call_pdb: + self.pdb = self.debugger_cls() + else: + self.pdb = None + + def _get_ostream(self): + """Output stream that exceptions are written to. 
+ + Valid values are: + + - None: the default, which means that IPython will dynamically resolve + to sys.stdout. This ensures compatibility with most tools, including + Windows (where plain stdout doesn't recognize ANSI escapes). + + - Any object with 'write' and 'flush' attributes. + """ + return sys.stdout if self._ostream is None else self._ostream + + def _set_ostream(self, val): + assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush')) + self._ostream = val + + ostream = property(_get_ostream, _set_ostream) + + @staticmethod + def _get_chained_exception(exception_value): + cause = getattr(exception_value, "__cause__", None) + if cause: + return cause + if getattr(exception_value, "__suppress_context__", False): + return None + return getattr(exception_value, "__context__", None) + + def get_parts_of_chained_exception( + self, evalue + ) -> Optional[Tuple[type, BaseException, TracebackType]]: + + chained_evalue = self._get_chained_exception(evalue) + + if chained_evalue: + return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__ + return None + + def prepare_chained_exception_message(self, cause) -> List[Any]: + direct_cause = "\nThe above exception was the direct cause of the following exception:\n" + exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n" + + if cause: + message = [[direct_cause]] + else: + message = [[exception_during_handling]] + return message + + @property + def has_colors(self) -> bool: + return self.color_scheme_table.active_scheme_name.lower() != "nocolor" + + def set_colors(self, *args, **kw): + """Shorthand access to the color table scheme selector method.""" + + # Set own color table + self.color_scheme_table.set_active_scheme(*args, **kw) + # for convenience, set Colors to the active scheme + self.Colors = self.color_scheme_table.active_colors + # Also set colors of debugger + if hasattr(self, 'pdb') and self.pdb is not None: + self.pdb.set_colors(*args, **kw) + + def color_toggle(self): + """Toggle between the currently active color scheme and NoColor.""" + + if self.color_scheme_table.active_scheme_name == 'NoColor': + self.color_scheme_table.set_active_scheme(self.old_scheme) + self.Colors = self.color_scheme_table.active_colors + else: + self.old_scheme = self.color_scheme_table.active_scheme_name + self.color_scheme_table.set_active_scheme('NoColor') + self.Colors = self.color_scheme_table.active_colors + + def stb2text(self, stb): + """Convert a structured traceback (a list) to a string.""" + return '\n'.join(stb) + + def text(self, etype, value, tb, tb_offset: Optional[int] = None, context=5): + """Return formatted traceback. + + Subclasses may override this if they add extra arguments. + """ + tb_list = self.structured_traceback(etype, value, tb, + tb_offset, context) + return self.stb2text(tb_list) + + def structured_traceback( + self, etype, evalue, tb, tb_offset: Optional[int] = None, context=5, mode=None + ): + """Return a list of traceback frames. + + Must be implemented by each class. + """ + raise NotImplementedError() + + +#--------------------------------------------------------------------------- +class ListTB(TBTools): + """Print traceback information from a traceback list, with optional color. 
+
+    Calling requires 3 arguments: (etype, evalue, elist)
+    as would be obtained by::
+
+      etype, evalue, tb = sys.exc_info()
+      if tb:
+        elist = traceback.extract_tb(tb)
+      else:
+        elist = None
+
+    It can thus be used by programs which need to process the traceback before
+    printing (such as console replacements based on the code module from the
+    standard library).
+
+    Because they are meant to be called without a full traceback (only a
+    list), instances of this class can't call the interactive pdb debugger."""
+
+
+    def __call__(self, etype, value, elist):
+        self.ostream.flush()
+        self.ostream.write(self.text(etype, value, elist))
+        self.ostream.write('\n')
+
+    def _extract_tb(self, tb):
+        if tb:
+            return traceback.extract_tb(tb)
+        else:
+            return None
+
+    def structured_traceback(
+        self,
+        etype: type,
+        evalue: BaseException,
+        etb: Optional[TracebackType] = None,
+        tb_offset: Optional[int] = None,
+        context=5,
+    ):
+        """Return a color formatted string with the traceback info.
+
+        Parameters
+        ----------
+        etype : exception type
+            Type of the exception raised.
+        evalue : object
+            Data stored in the exception.
+        etb : list | TracebackType | None
+            If list: List of frames, see class docstring for details.
+            If Traceback: Traceback of the exception.
+        tb_offset : int, optional
+            Number of frames in the traceback to skip. If not given, the
+            instance value is used (set in constructor).
+        context : int, optional
+            Number of lines of context information to print.
+
+        Returns
+        -------
+        String with formatted exception.
+        """
+        # This is a workaround to get chained_exc_ids in recursive calls
+        # etb should not be a tuple if structured_traceback is not recursive
+        if isinstance(etb, tuple):
+            etb, chained_exc_ids = etb
+        else:
+            chained_exc_ids = set()
+
+        if isinstance(etb, list):
+            elist = etb
+        elif etb is not None:
+            elist = self._extract_tb(etb)
+        else:
+            elist = []
+        tb_offset = self.tb_offset if tb_offset is None else tb_offset
+        assert isinstance(tb_offset, int)
+        Colors = self.Colors
+        out_list = []
+        if elist:
+
+            if tb_offset and len(elist) > tb_offset:
+                elist = elist[tb_offset:]
+
+            out_list.append('Traceback %s(most recent call last)%s:' %
+                            (Colors.normalEm, Colors.Normal) + '\n')
+            out_list.extend(self._format_list(elist))
+        # The exception info should be a single entry in the list.
+        lines = ''.join(self._format_exception_only(etype, evalue))
+        out_list.append(lines)
+
+        exception = self.get_parts_of_chained_exception(evalue)
+
+        if exception and not id(exception[1]) in chained_exc_ids:
+            chained_exception_message = self.prepare_chained_exception_message(
+                evalue.__cause__)[0]
+            etype, evalue, etb = exception
+            # Trace exception to avoid infinite 'cause' loop
+            chained_exc_ids.add(id(exception[1]))
+            chained_exceptions_tb_offset = 0
+            out_list = (
+                self.structured_traceback(
+                    etype, evalue, (etb, chained_exc_ids),
+                    chained_exceptions_tb_offset, context)
+                + chained_exception_message
+                + out_list)
+
+        return out_list
+
+    def _format_list(self, extracted_list):
+        """Format a list of traceback entry tuples for printing.
+
+        Given a list of tuples as returned by extract_tb() or
+        extract_stack(), return a list of strings ready for printing.
+        Each string in the resulting list corresponds to the item with the
+        same index in the argument list. Each string ends in a newline;
+        the strings may contain internal newlines as well, for those items
+        whose source text line is not None.
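+
+        A single formatted entry looks roughly like this (illustrative,
+        colors omitted)::
+
+          File ~/project/foo.py:12 in bar
+            return baz()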
+
+        Lifted almost verbatim from traceback.py
+        """
+
+        Colors = self.Colors
+        list = []
+        for ind, (filename, lineno, name, line) in enumerate(extracted_list):
+            normalCol, nameCol, fileCol, lineCol = (
+                # Emphasize the last entry
+                (Colors.normalEm, Colors.nameEm, Colors.filenameEm, Colors.line)
+                if ind == len(extracted_list) - 1
+                else (Colors.Normal, Colors.name, Colors.filename, "")
+            )
+
+            fns = _format_filename(filename, fileCol, normalCol, lineno=lineno)
+            item = f"{normalCol} {fns}"
+
+            if name != "":
+                item += f" in {nameCol}{name}{normalCol}\n"
+            else:
+                item += "\n"
+            if line:
+                item += f"{lineCol} {line.strip()}{normalCol}\n"
+            list.append(item)
+
+        return list
+
+    def _format_exception_only(self, etype, value):
+        """Format the exception part of a traceback.
+
+        The arguments are the exception type and value such as given by
+        sys.exc_info()[:2]. The return value is a list of strings, each ending
+        in a newline. Normally, the list contains a single string; however,
+        for SyntaxError exceptions, it contains several lines that (when
+        printed) display detailed information about where the syntax error
+        occurred. The message indicating which exception occurred is always
+        the last string in the list.
+
+        Also lifted nearly verbatim from traceback.py
+        """
+        have_filedata = False
+        Colors = self.Colors
+        list = []
+        stype = py3compat.cast_unicode(Colors.excName + etype.__name__ + Colors.Normal)
+        if value is None:
+            # Not sure if this can still happen in Python 2.6 and above
+            list.append(stype + '\n')
+        else:
+            if issubclass(etype, SyntaxError):
+                have_filedata = True
+                if not value.filename: value.filename = "<string>"
+                if value.lineno:
+                    lineno = value.lineno
+                    textline = linecache.getline(value.filename, value.lineno)
+                else:
+                    lineno = "unknown"
+                    textline = ""
+                list.append(
+                    "%s %s%s\n"
+                    % (
+                        Colors.normalEm,
+                        _format_filename(
+                            value.filename,
+                            Colors.filenameEm,
+                            Colors.normalEm,
+                            lineno=(None if lineno == "unknown" else lineno),
+                        ),
+                        Colors.Normal,
+                    )
+                )
+                if textline == "":
+                    textline = py3compat.cast_unicode(value.text, "utf-8")
+
+                if textline is not None:
+                    i = 0
+                    while i < len(textline) and textline[i].isspace():
+                        i += 1
+                    list.append('%s %s%s\n' % (Colors.line,
+                                               textline.strip(),
+                                               Colors.Normal))
+                    if value.offset is not None:
+                        s = ' '
+                        for c in textline[i:value.offset - 1]:
+                            if c.isspace():
+                                s += c
+                            else:
+                                s += ' '
+                        list.append('%s%s^%s\n' % (Colors.caret, s,
+                                                   Colors.Normal))
+
+            try:
+                s = value.msg
+            except Exception:
+                s = self._some_str(value)
+            if s:
+                list.append('%s%s:%s %s\n' % (stype, Colors.excName,
+                                              Colors.Normal, s))
+            else:
+                list.append('%s\n' % stype)
+
+        # sync with user hooks
+        if have_filedata:
+            ipinst = get_ipython()
+            if ipinst is not None:
+                ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
+
+        return list
+
+    def get_exception_only(self, etype, value):
+        """Only print the exception type and message, without a traceback.
+
+        Parameters
+        ----------
+        etype : exception type
+        value : exception value
+        """
+        return ListTB.structured_traceback(self, etype, value)
+
+    def show_exception_only(self, etype, evalue):
+        """Only print the exception type and message, without a traceback.
+
+        Parameters
+        ----------
+        etype : exception type
+        evalue : exception value
+        """
+        # This method needs to use __call__ from *this* class, not the one from
+        # a subclass whose signature or behavior may be different
+        ostream = self.ostream
+        ostream.flush()
+        ostream.write('\n'.join(self.get_exception_only(etype, evalue)))
+        ostream.flush()
+
+    def _some_str(self, value):
+        # Lifted from traceback.py
+        try:
+            return py3compat.cast_unicode(str(value))
+        except:
+            return u'<unprintable %s object>' % type(value).__name__
+
+
+#----------------------------------------------------------------------------
+class VerboseTB(TBTools):
+    """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
+    of HTML. Requires inspect and pydoc. Crazy, man.
+
+    Modified version which optionally strips the topmost entries from the
+    traceback, to be used with alternate interpreters (because their own code
+    would appear in the traceback)."""
+
+    _tb_highlight = "bg:ansiyellow"
+
+    def __init__(
+        self,
+        color_scheme: str = "Linux",
+        call_pdb: bool = False,
+        ostream=None,
+        tb_offset: int = 0,
+        long_header: bool = False,
+        include_vars: bool = True,
+        check_cache=None,
+        debugger_cls=None,
+        parent=None,
+        config=None,
+    ):
+        """Specify traceback offset, headers and color scheme.
+
+        Define how many frames to drop from the tracebacks. Calling it with
+        tb_offset=1 allows use of this handler in interpreters which will have
+        their own code at the top of the traceback (VerboseTB will first
+        remove that frame before printing the traceback info)."""
+        TBTools.__init__(
+            self,
+            color_scheme=color_scheme,
+            call_pdb=call_pdb,
+            ostream=ostream,
+            parent=parent,
+            config=config,
+            debugger_cls=debugger_cls,
+        )
+        self.tb_offset = tb_offset
+        self.long_header = long_header
+        self.include_vars = include_vars
+        # By default we use linecache.checkcache, but the user can provide a
+        # different check_cache implementation. This was formerly used by the
+        # IPython kernel for interactive code, but is no longer necessary.
+        if check_cache is None:
+            check_cache = linecache.checkcache
+        self.check_cache = check_cache
+
+        self.skip_hidden = True
+
+    def format_record(self, frame_info):
+        """Format a single stack frame"""
+        Colors = self.Colors  # just a shorthand + quicker name lookup
+        ColorsNormal = Colors.Normal  # used a lot
+
+        if isinstance(frame_info, stack_data.RepeatedFrames):
+            return ' %s[... 
skipping similar frames: %s]%s\n' % (
+                Colors.excName, frame_info.description, ColorsNormal)
+
+        indent = " " * INDENT_SIZE
+        em_normal = "%s\n%s%s" % (Colors.valEm, indent, ColorsNormal)
+        tpl_call = f"in {Colors.vName}{{file}}{Colors.valEm}{{scope}}{ColorsNormal}"
+        tpl_call_fail = "in %s%%s%s(***failed resolving arguments***)%s" % (
+            Colors.vName,
+            Colors.valEm,
+            ColorsNormal,
+        )
+        tpl_name_val = "%%s %s= %%s%s" % (Colors.valEm, ColorsNormal)
+
+        link = _format_filename(
+            frame_info.filename,
+            Colors.filenameEm,
+            ColorsNormal,
+            lineno=frame_info.lineno,
+        )
+        args, varargs, varkw, locals_ = inspect.getargvalues(frame_info.frame)
+
+        func = frame_info.executing.code_qualname()
+        if func == "<module>":
+            call = ""
+        else:
+            # Decide whether to include variable details or not
+            var_repr = eqrepr if self.include_vars else nullrepr
+            try:
+                scope = inspect.formatargvalues(
+                    args, varargs, varkw, locals_, formatvalue=var_repr
+                )
+                call = tpl_call.format(file=func, scope=scope)
+            except KeyError:
+                # This happens in situations like errors inside generator
+                # expressions, where local variables are listed in the
+                # line, but can't be extracted from the frame. I'm not
+                # 100% sure this isn't actually a bug in inspect itself,
+                # but since there's no info for us to compute with, the
+                # best we can do is report the failure and move on. Here
+                # we must *not* call any traceback construction again,
+                # because that would mess up use of %debug later on. So we
+                # simply report the failure and move on. The only
+                # limitation will be that this frame won't have locals
+                # listed in the call signature. Quite subtle problem...
+                # I can't think of a good way to validate this in a unit
+                # test, but running a script consisting of:
+                #   dict( (k,v.strip()) for (k,v) in range(10) )
+                # will illustrate the error, if this exception catch is
+                # disabled.
+                call = tpl_call_fail % func
+
+        lvals = ''
+        lvals_list = []
+        if self.include_vars:
+            try:
+                # we likely want to fix stackdata at some point, but
+                # still need a workaround.
+                fibp = frame_info.variables_in_executing_piece
+                for var in fibp:
+                    lvals_list.append(tpl_name_val % (var.name, repr(var.value)))
+            except Exception:
+                lvals_list.append(
+                    "Exception trying to inspect frame. No more locals available."
+                )
+        if lvals_list:
+            lvals = '%s%s' % (indent, em_normal.join(lvals_list))
+
+        result = f'{link}{", " if call else ""}{call}\n'
+
+        result += ''.join(_format_traceback_lines(frame_info.lines, Colors, self.has_colors, lvals))
+        return result
+
+    def prepare_header(self, etype, long_version=False):
+        colors = self.Colors  # just a shorthand + quicker name lookup
+        colorsnormal = colors.Normal  # used a lot
+        exc = '%s%s%s' % (colors.excName, etype, colorsnormal)
+        width = min(75, get_terminal_size()[0])
+        if long_version:
+            # Header with the exception type, python version, and date
+            pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
+            date = time.ctime(time.time())
+
+            head = '%s%s%s\n%s%s%s\n%s' % (colors.topline, '-' * width, colorsnormal,
+                                           exc, ' ' * (width - len(str(etype)) - len(pyver)),
+                                           pyver, date.rjust(width) )
+            head += "\nA problem occurred executing Python code. Here is the sequence of function" \
+                    "\ncalls leading up to the error, with the most recent (innermost) call last."
+        else:
+            # Simplified header
+            head = '%s%s' % (exc, 'Traceback (most recent call last)'. 
\ + rjust(width - len(str(etype))) ) + + return head + + def format_exception(self, etype, evalue): + colors = self.Colors # just a shorthand + quicker name lookup + colorsnormal = colors.Normal # used a lot + # Get (safely) a string form of the exception info + try: + etype_str, evalue_str = map(str, (etype, evalue)) + except: + # User exception is improperly defined. + etype, evalue = str, sys.exc_info()[:2] + etype_str, evalue_str = map(str, (etype, evalue)) + # ... and format it + return ['%s%s%s: %s' % (colors.excName, etype_str, + colorsnormal, py3compat.cast_unicode(evalue_str))] + + def format_exception_as_a_whole( + self, + etype: type, + evalue: BaseException, + etb: Optional[TracebackType], + number_of_lines_of_context, + tb_offset: Optional[int], + ): + """Formats the header, traceback and exception message for a single exception. + + This may be called multiple times by Python 3 exception chaining + (PEP 3134). + """ + # some locals + orig_etype = etype + try: + etype = etype.__name__ + except AttributeError: + pass + + tb_offset = self.tb_offset if tb_offset is None else tb_offset + assert isinstance(tb_offset, int) + head = self.prepare_header(etype, self.long_header) + records = ( + self.get_records(etb, number_of_lines_of_context, tb_offset) if etb else [] + ) + + frames = [] + skipped = 0 + lastrecord = len(records) - 1 + for i, r in enumerate(records): + if not isinstance(r, stack_data.RepeatedFrames) and self.skip_hidden: + if r.frame.f_locals.get("__tracebackhide__", 0) and i != lastrecord: + skipped += 1 + continue + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) + skipped = 0 + frames.append(self.format_record(r)) + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... 
skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) + + formatted_exception = self.format_exception(etype, evalue) + if records: + frame_info = records[-1] + ipinst = get_ipython() + if ipinst is not None: + ipinst.hooks.synchronize_with_editor(frame_info.filename, frame_info.lineno, 0) + + return [[head] + frames + [''.join(formatted_exception[0])]] + + def get_records( + self, etb: TracebackType, number_of_lines_of_context: int, tb_offset: int + ): + assert etb is not None + context = number_of_lines_of_context - 1 + after = context // 2 + before = context - after + if self.has_colors: + style = get_style_by_name("default") + style = stack_data.style_with_executing_node(style, self._tb_highlight) + formatter = Terminal256Formatter(style=style) + else: + formatter = None + options = stack_data.Options( + before=before, + after=after, + pygments_formatter=formatter, + ) + return list(stack_data.FrameInfo.stack_data(etb, options=options))[tb_offset:] + + def structured_traceback( + self, + etype: type, + evalue: Optional[BaseException], + etb: Optional[TracebackType], + tb_offset: Optional[int] = None, + number_of_lines_of_context: int = 5, + ): + """Return a nice text document describing the traceback.""" + formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context, + tb_offset) + + colors = self.Colors # just a shorthand + quicker name lookup + colorsnormal = colors.Normal # used a lot + head = '%s%s%s' % (colors.topline, '-' * min(75, get_terminal_size()[0]), colorsnormal) + structured_traceback_parts = [head] + chained_exceptions_tb_offset = 0 + lines_of_context = 3 + formatted_exceptions = formatted_exception + exception = self.get_parts_of_chained_exception(evalue) + if exception: + assert evalue is not None + formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) + etype, evalue, etb = exception + else: + evalue = None + chained_exc_ids = set() + while evalue: + formatted_exceptions += self.format_exception_as_a_whole(etype, evalue, etb, lines_of_context, + chained_exceptions_tb_offset) + exception = self.get_parts_of_chained_exception(evalue) + + if exception and not id(exception[1]) in chained_exc_ids: + chained_exc_ids.add(id(exception[1])) # trace exception to avoid infinite 'cause' loop + formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) + etype, evalue, etb = exception + else: + evalue = None + + # we want to see exceptions in a reversed order: + # the first exception should be on top + for formatted_exception in reversed(formatted_exceptions): + structured_traceback_parts += formatted_exception + + return structured_traceback_parts + + def debugger(self, force: bool = False): + """Call up the pdb debugger if desired, always clean up the tb + reference. + + Keywords: + + - force(False): by default, this routine checks the instance call_pdb + flag and does not actually invoke the debugger if the flag is false. + The 'force' option forces the debugger to activate even if the flag + is false. + + If the call_pdb flag is set, the pdb interactive debugger is + invoked. In all cases, the self.tb reference to the current traceback + is deleted to prevent lingering references which hamper memory + management. 
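+
+        A minimal illustrative use (sketch; assumes a ``VerboseTB`` instance
+        built as shown, and uses the ``handler()`` method defined below)::
+
+            import sys
+            handler = VerboseTB(call_pdb=False)
+            try:
+                1 / 0
+            except ZeroDivisionError:
+                handler.handler(sys.exc_info())   # print the formatted traceback
+                handler.debugger(force=True)      # enter pdb despite call_pdb=False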
+ + Note that each call to pdb() does an 'import readline', so if your app + requires a special setup for the readline completers, you'll have to + fix that by hand after invoking the exception handler.""" + + if force or self.call_pdb: + if self.pdb is None: + self.pdb = self.debugger_cls() + # the system displayhook may have changed, restore the original + # for pdb + display_trap = DisplayTrap(hook=sys.__displayhook__) + with display_trap: + self.pdb.reset() + # Find the right frame so we don't pop up inside ipython itself + if hasattr(self, 'tb') and self.tb is not None: + etb = self.tb + else: + etb = self.tb = sys.last_traceback + while self.tb is not None and self.tb.tb_next is not None: + assert self.tb.tb_next is not None + self.tb = self.tb.tb_next + if etb and etb.tb_next: + etb = etb.tb_next + self.pdb.botframe = etb.tb_frame + self.pdb.interaction(None, etb) + + if hasattr(self, 'tb'): + del self.tb + + def handler(self, info=None): + (etype, evalue, etb) = info or sys.exc_info() + self.tb = etb + ostream = self.ostream + ostream.flush() + ostream.write(self.text(etype, evalue, etb)) + ostream.write('\n') + ostream.flush() + + # Changed so an instance can just be called as VerboseTB_inst() and print + # out the right info on its own. + def __call__(self, etype=None, evalue=None, etb=None): + """This hook can replace sys.excepthook (for Python 2.1 or higher).""" + if etb is None: + self.handler() + else: + self.handler((etype, evalue, etb)) + try: + self.debugger() + except KeyboardInterrupt: + print("\nKeyboardInterrupt") + + +#---------------------------------------------------------------------------- +class FormattedTB(VerboseTB, ListTB): + """Subclass ListTB but allow calling with a traceback. + + It can thus be used as a sys.excepthook for Python > 2.1. + + Also adds 'Context' and 'Verbose' modes, not available in ListTB. + + Allows a tb_offset to be specified. This is useful for situations where + one needs to remove a number of topmost frames from the traceback (such as + occurs with python programs that themselves execute other python code, + like Python shells). """ + + mode: str + + def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False, + ostream=None, + tb_offset=0, long_header=False, include_vars=False, + check_cache=None, debugger_cls=None, + parent=None, config=None): + + # NEVER change the order of this list. Put new modes at the end: + self.valid_modes = ['Plain', 'Context', 'Verbose', 'Minimal'] + self.verbose_modes = self.valid_modes[1:3] + + VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, + ostream=ostream, tb_offset=tb_offset, + long_header=long_header, include_vars=include_vars, + check_cache=check_cache, debugger_cls=debugger_cls, + parent=parent, config=config) + + # Different types of tracebacks are joined with different separators to + # form a single string. 
They are taken from this dict
+        self._join_chars = dict(Plain='', Context='\n', Verbose='\n',
+                                Minimal='')
+        # set_mode also sets the tb_join_char attribute
+        self.set_mode(mode)
+
+    def structured_traceback(self, etype, value, tb, tb_offset=None, number_of_lines_of_context=5):
+        tb_offset = self.tb_offset if tb_offset is None else tb_offset
+        mode = self.mode
+        if mode in self.verbose_modes:
+            # Verbose modes need a full traceback
+            return VerboseTB.structured_traceback(
+                self, etype, value, tb, tb_offset, number_of_lines_of_context
+            )
+        elif mode == 'Minimal':
+            return ListTB.get_exception_only(self, etype, value)
+        else:
+            # We must check the source cache because otherwise we can print
+            # out-of-date source code.
+            self.check_cache()
+            # Now we can extract and format the exception
+            return ListTB.structured_traceback(
+                self, etype, value, tb, tb_offset, number_of_lines_of_context
+            )
+
+    def stb2text(self, stb):
+        """Convert a structured traceback (a list) to a string."""
+        return self.tb_join_char.join(stb)
+
+    def set_mode(self, mode: Optional[str] = None):
+        """Switch to the desired mode.
+
+        If mode is not specified, cycles through the available modes."""
+
+        if not mode:
+            new_idx = (self.valid_modes.index(self.mode) + 1 ) % \
+                      len(self.valid_modes)
+            self.mode = self.valid_modes[new_idx]
+        elif mode not in self.valid_modes:
+            raise ValueError(
+                "Unrecognized mode in FormattedTB: <" + mode + ">\n"
+                "Valid modes: " + str(self.valid_modes)
+            )
+        else:
+            assert isinstance(mode, str)
+            self.mode = mode
+        # include variable details only in 'Verbose' mode
+        self.include_vars = (self.mode == self.valid_modes[2])
+        # Set the join character for generating text tracebacks
+        self.tb_join_char = self._join_chars[self.mode]
+
+    # some convenient shortcuts
+    def plain(self):
+        self.set_mode(self.valid_modes[0])
+
+    def context(self):
+        self.set_mode(self.valid_modes[1])
+
+    def verbose(self):
+        self.set_mode(self.valid_modes[2])
+
+    def minimal(self):
+        self.set_mode(self.valid_modes[3])
+
+
+#----------------------------------------------------------------------------
+class AutoFormattedTB(FormattedTB):
+    """A traceback printer which can be called on the fly.
+
+    It will find out about exceptions by itself.
+
+    A brief example::
+
+        AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
+        try:
+          ...
+        except:
+          AutoTB()  # or AutoTB(out=logfile) where logfile is an open file object
+    """
+
+    def __call__(self, etype=None, evalue=None, etb=None,
+                 out=None, tb_offset=None):
+        """Print out a formatted exception traceback.
+
+        Optional arguments:
+          - out: an open file-like object to direct output to.
+
+          - tb_offset: the number of frames to skip over in the stack, on a
+            per-call basis (this overrides temporarily the instance's tb_offset
+            given at initialization time)."""
+
+        if out is None:
+            out = self.ostream
+        out.flush()
+        out.write(self.text(etype, evalue, etb, tb_offset))
+        out.write('\n')
+        out.flush()
+        # FIXME: we should remove the auto pdb behavior from here and leave
+        # that to the clients.
+        try:
+            self.debugger()
+        except KeyboardInterrupt:
+            print("\nKeyboardInterrupt")
+
+    def structured_traceback(self, etype=None, value=None, tb=None,
+                             tb_offset=None, number_of_lines_of_context=5):
+
+        etype: type
+        value: BaseException
+        # tb: TracebackType or tuple of tb types ?
+        if etype is None:
+            etype, value, tb = sys.exc_info()
+        if isinstance(tb, tuple):
+            # tb is a tuple if this is a chained exception.
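+            # (ListTB.structured_traceback packs (etb, chained_exc_ids) into
+            # a tuple when it recurses over chained exceptions, so the real
+            # traceback is the first element.)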
+ self.tb = tb[0] + else: + self.tb = tb + return FormattedTB.structured_traceback( + self, etype, value, tb, tb_offset, number_of_lines_of_context) + + +#--------------------------------------------------------------------------- + +# A simple class to preserve Nathan's original functionality. +class ColorTB(FormattedTB): + """Shorthand to initialize a FormattedTB in Linux colors mode.""" + + def __init__(self, color_scheme='Linux', call_pdb=0, **kwargs): + FormattedTB.__init__(self, color_scheme=color_scheme, + call_pdb=call_pdb, **kwargs) + + +class SyntaxTB(ListTB): + """Extension which holds some state: the last exception value""" + + def __init__(self, color_scheme='NoColor', parent=None, config=None): + ListTB.__init__(self, color_scheme, parent=parent, config=config) + self.last_syntax_error = None + + def __call__(self, etype, value, elist): + self.last_syntax_error = value + + ListTB.__call__(self, etype, value, elist) + + def structured_traceback(self, etype, value, elist, tb_offset=None, + context=5): + # If the source file has been edited, the line in the syntax error can + # be wrong (retrieved from an outdated cache). This replaces it with + # the current value. + if isinstance(value, SyntaxError) \ + and isinstance(value.filename, str) \ + and isinstance(value.lineno, int): + linecache.checkcache(value.filename) + newtext = linecache.getline(value.filename, value.lineno) + if newtext: + value.text = newtext + self.last_syntax_error = value + return super(SyntaxTB, self).structured_traceback(etype, value, elist, + tb_offset=tb_offset, context=context) + + def clear_err_state(self): + """Return the current error state and clear it""" + e = self.last_syntax_error + self.last_syntax_error = None + return e + + def stb2text(self, stb): + """Convert a structured traceback (a list) to a string.""" + return ''.join(stb) + + +# some internal-use functions +def text_repr(value): + """Hopefully pretty robust repr equivalent.""" + # this is pretty horrible but should always return *something* + try: + return pydoc.text.repr(value) + except KeyboardInterrupt: + raise + except: + try: + return repr(value) + except KeyboardInterrupt: + raise + except: + try: + # all still in an except block so we catch + # getattr raising + name = getattr(value, '__name__', None) + if name: + # ick, recursion + return text_repr(name) + klass = getattr(value, '__class__', None) + if klass: + return '%s instance' % text_repr(klass) + except KeyboardInterrupt: + raise + except: + return 'UNRECOVERABLE REPR FAILURE' + + +def eqrepr(value, repr=text_repr): + return '=%s' % repr(value) + + +def nullrepr(value, repr=text_repr): + return '' diff --git a/myenv/lib/python3.10/site-packages/IPython/core/usage.py b/myenv/lib/python3.10/site-packages/IPython/core/usage.py new file mode 100644 index 000000000..53219bceb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/core/usage.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +"""Usage information for the main IPython applications. +""" +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# Copyright (C) 2001-2007 Fernando Perez. +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#-----------------------------------------------------------------------------
+
+import sys
+from IPython.core import release
+
+cl_usage = """\
+=========
+ IPython
+=========
+
+Tools for Interactive Computing in Python
+=========================================
+
+ A Python shell with automatic history (input and output), dynamic object
+ introspection, easier configuration, command completion, access to the
+ system shell and more. IPython can also be embedded in running programs.
+
+
+Usage
+
+ ipython [subcommand] [options] [-c cmd | -m mod | file] [--] [arg] ...
+
+ If invoked with no options, it executes the file and exits, passing the
+ remaining arguments to the script, just as if you had specified the same
+ command with python. You may need to specify `--` before args to be passed
+ to the script, to prevent IPython from attempting to parse them. If you
+ specify the option `-i` before the filename, it will enter an interactive
+ IPython session after running the script, rather than exiting. Files ending
+ in .py will be treated as normal Python, but files ending in .ipy can
+ contain special IPython syntax (magic commands, shell expansions, etc.).
+
+ Almost all configuration in IPython is available via the command-line. Do
+ `ipython --help-all` to see all available options. For persistent
+ configuration, look into your `ipython_config.py` configuration file for
+ details.
+
+ This file is typically installed in the `IPYTHONDIR` directory, and there
+ is a separate configuration directory for each profile. The default profile
+ directory will be located in $IPYTHONDIR/profile_default. IPYTHONDIR
+ defaults to `$HOME/.ipython`. For Windows users, $HOME resolves to
+ C:\\Users\\YourUserName in most instances.
+
+ To initialize a profile with the default configuration file, do::
+
+      $> ipython profile create
+
+ and start editing `IPYTHONDIR/profile_default/ipython_config.py`
+
+ In IPython's documentation, we will refer to this directory as
+ `IPYTHONDIR`; you can change its default location by creating an
+ environment variable with this name and setting it to the desired path.
+
+ For more information, see the manual available in HTML and PDF in your
+ installation, or online at https://ipython.org/documentation.html.
+"""
+
+interactive_usage = """
+IPython -- An enhanced Interactive Python
+=========================================
+
+IPython offers a fully compatible replacement for the standard Python
+interpreter, with convenient shell features, special commands, command
+history mechanism and output results caching.
+
+At your system command line, type 'ipython -h' to see the command line
+options available. This document only describes interactive features.
+
+GETTING HELP
+------------
+
+Within IPython you have various ways to access help:
+
+  ?         -> Introduction and overview of IPython's features (this screen).
+  object?   -> Details about 'object'.
+  object??  -> More detailed, verbose information about 'object'.
+  %quickref -> Quick reference of all IPython specific syntax and magics.
+  help      -> Access Python's own help system.
+
+If you are in terminal IPython you can quit this screen by pressing `q`.
+
+
+MAIN FEATURES
+-------------
+
+* Access to the standard Python help with object docstrings and the Python
+  manuals. Simply type 'help' (no quotes) to invoke it.
+
+* Magic commands: type %magic for information on the magic subsystem.
+
+* System command aliases, via the %alias command or the configuration file(s).
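+
+  For example (illustrative)::
+
+    In [1]: %alias d ls -F
+    In [2]: d /tmp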
+
+* Dynamic object information:
+
+  Typing ?word or word? prints detailed information about an object. Certain
+  long strings (code, etc.) get snipped in the center for brevity.
+
+  Typing ??word or word?? gives access to the full information without
+  snipping long strings. Strings that are longer than the screen are printed
+  through the less pager.
+
+  The ?/?? system gives access to the full source code for any object (if
+  available), shows function prototypes and other useful information.
+
+  If you just want to see an object's docstring, type '%pdoc object' (without
+  quotes, and without % if you have automagic on).
+
+* Tab completion in the local namespace:
+
+  At any time, hitting tab will complete any available python commands or
+  variable names, and show you a list of the possible completions if there's
+  no unambiguous one. It will also complete filenames in the current directory.
+
+* Search previous command history in multiple ways:
+
+  - Start typing, and then use arrow keys up/down or (Ctrl-p/Ctrl-n) to search
+    through the history items that match what you've typed so far.
+
+  - Hit Ctrl-r: opens a search prompt. Begin typing and the system searches
+    your history for lines that match what you've typed so far, completing as
+    much as it can.
+
+  - %hist: search history by index.
+
+* Persistent command history across sessions.
+
+* Logging of input with the ability to save and restore a working session.
+
+* System shell with !. Typing !ls will run 'ls' in the current directory.
+
+* The reload command does a 'deep' reload of a module: changes made to the
+  module since you imported will actually be available without having to exit.
+
+* Verbose and colored exception traceback printouts. See the magic xmode and
+  xcolor functions for details (just type %magic).
+
+* Input caching system:
+
+  IPython offers numbered prompts (In/Out) with input and output caching. All
+  input is saved and can be retrieved as variables (besides the usual arrow
+  key recall).
+
+  The following GLOBAL variables always exist (so don't overwrite them!):
+  _i: stores previous input.
+  _ii: next previous.
+  _iii: next-next previous.
+  _ih : a list of all inputs; _ih[n] is the input from line n.
+
+  Additionally, global variables named _i<n> are dynamically created (<n>
+  being the prompt counter), such that _i<n> == _ih[<n>]
+
+  For example, what you typed at prompt 14 is available as _i14 and _ih[14].
+
+  You can create macros which contain multiple input lines from this history,
+  for later re-execution, with the %macro function.
+
+  The history function %hist allows you to see any part of your input history
+  by printing a range of the _i variables. Note that inputs which contain
+  magic functions (%) appear in the history with a prepended comment. This is
+  because they aren't really valid Python code, so you can't exec them.
+
+* Output caching system:
+
+  For output that is returned from actions, a system similar to the input
+  cache exists but using _ instead of _i. Only actions that produce a result
+  (NOT assignments, for example) are cached. If you are familiar with
+  Mathematica, IPython's _ variables behave exactly like Mathematica's %
+  variables.
+
+  The following GLOBAL variables always exist (so don't overwrite them!):
+  _ (one underscore): previous output.
+  __ (two underscores): next previous.
+  ___ (three underscores): next-next previous.
+
+  Global variables named _<n> are dynamically created (<n> being the prompt
+  counter), such that the result of output <n> is always available as _<n>.
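+
+  For example (an illustrative session)::
+
+    In [1]: 2 + 2
+    Out[1]: 4
+
+    In [2]: _ + 1
+    Out[2]: 5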
+ + Finally, a global dictionary named _oh exists with entries for all lines + which generated output. + +* Directory history: + + Your history of visited directories is kept in the global list _dh, and the + magic %cd command can be used to go to any entry in that list. + +* Auto-parentheses and auto-quotes (adapted from Nathan Gray's LazyPython) + + 1. Auto-parentheses + + Callable objects (i.e. functions, methods, etc) can be invoked like + this (notice the commas between the arguments):: + + In [1]: callable_ob arg1, arg2, arg3 + + and the input will be translated to this:: + + callable_ob(arg1, arg2, arg3) + + This feature is off by default (in rare cases it can produce + undesirable side-effects), but you can activate it at the command-line + by starting IPython with `--autocall 1`, set it permanently in your + configuration file, or turn on at runtime with `%autocall 1`. + + You can force auto-parentheses by using '/' as the first character + of a line. For example:: + + In [1]: /globals # becomes 'globals()' + + Note that the '/' MUST be the first character on the line! This + won't work:: + + In [2]: print /globals # syntax error + + In most cases the automatic algorithm should work, so you should + rarely need to explicitly invoke /. One notable exception is if you + are trying to call a function with a list of tuples as arguments (the + parenthesis will confuse IPython):: + + In [1]: zip (1,2,3),(4,5,6) # won't work + + but this will work:: + + In [2]: /zip (1,2,3),(4,5,6) + ------> zip ((1,2,3),(4,5,6)) + Out[2]= [(1, 4), (2, 5), (3, 6)] + + IPython tells you that it has altered your command line by + displaying the new command line preceded by -->. e.g.:: + + In [18]: callable list + -------> callable (list) + + 2. Auto-Quoting + + You can force auto-quoting of a function's arguments by using ',' as + the first character of a line. For example:: + + In [1]: ,my_function /home/me # becomes my_function("/home/me") + + If you use ';' instead, the whole argument is quoted as a single + string (while ',' splits on whitespace):: + + In [2]: ,my_function a b c # becomes my_function("a","b","c") + In [3]: ;my_function a b c # becomes my_function("a b c") + + Note that the ',' MUST be the first character on the line! This + won't work:: + + In [4]: x = ,my_function /home/me # syntax error +""" + +interactive_usage_min = """\ +An enhanced console for Python. +Some of its features are: +- Tab completion in the local namespace. +- Logging of input, see command-line options. +- System shell escape via ! , eg !ls. +- Magic commands, starting with a % (like %ls, %pwd, %cd, etc.) +- Keeps track of locally defined variables via %who, %whos. +- Show object information with a ? eg ?x or x? (use ?? for more info). +""" + +quick_reference = r""" +IPython -- An enhanced Interactive Python - Quick Reference Card +================================================================ + +obj?, obj?? : Get help, or more help for object (also works as + ?obj, ??obj). +?foo.*abc* : List names in 'foo' containing 'abc' in them. +%magic : Information about IPython's 'magic' % functions. + +Magic functions are prefixed by % or %%, and typically take their arguments +without parentheses, quotes or even commas for convenience. Line magics take a +single % and cell magics are prefixed with two %%. + +Example magic function calls: + +%alias d ls -F : 'd' is now an alias for 'ls -F' +alias d ls -F : Works if 'alias' not a python name +alist = %alias : Get list of aliases to 'alist' +cd /usr/share : Obvious. 
cd - to choose from visited dirs. +%cd?? : See help AND source for magic %cd +%timeit x=10 : time the 'x=10' statement with high precision. +%%timeit x=2**100 +x**100 : time 'x**100' with a setup of 'x=2**100'; setup code is not + counted. This is an example of a cell magic. + +System commands: + +!cp a.txt b/ : System command escape, calls os.system() +cp a.txt b/ : after %rehashx, most system commands work without ! +cp ${f}.txt $bar : Variable expansion in magics and system commands +files = !ls /usr : Capture system command output +files.s, files.l, files.n: "a b c", ['a','b','c'], 'a\nb\nc' + +History: + +_i, _ii, _iii : Previous, next previous, next next previous input +_i4, _ih[2:5] : Input history line 4, lines 2-4 +exec(_i81) : Execute input history line #81 again +%rep 81 : Edit input history line #81 +_, __, ___ : previous, next previous, next next previous output +_dh : Directory history +_oh : Output history +%hist : Command history of current session. +%hist -g foo : Search command history of (almost) all sessions for 'foo'. +%hist -g : Command history of (almost) all sessions. +%hist 1/2-8 : Command history containing lines 2-8 of session 1. +%hist 1/ ~2/ : Command history of session 1 and 2 sessions before current. +%hist ~8/1-~6/5 : Command history from line 1 of 8 sessions ago to + line 5 of 6 sessions ago. +%edit 0/ : Open editor to execute code with history of current session. + +Autocall: + +f 1,2 : f(1,2) # Off by default, enable with %autocall magic. +/f 1,2 : f(1,2) (forced autoparen) +,f 1 2 : f("1","2") +;f 1 2 : f("1 2") + +Remember: TAB completion works in many contexts, not just file names +or python names. + +The following magic functions are currently available: + +""" + +default_banner_parts = ["Python %s\n"%sys.version.split("\n")[0], + "Type 'copyright', 'credits' or 'license' for more information\n" , + "IPython {version} -- An enhanced Interactive Python. Type '?' for help.\n".format(version=release.version), +] + +default_banner = ''.join(default_banner_parts) diff --git a/myenv/lib/python3.10/site-packages/IPython/display.py b/myenv/lib/python3.10/site-packages/IPython/display.py new file mode 100644 index 000000000..b7f64f25c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/display.py @@ -0,0 +1,44 @@ +"""Public API for display tools in IPython. +""" + +# ----------------------------------------------------------------------------- +# Copyright (C) 2012 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# Imports +# ----------------------------------------------------------------------------- + +from IPython.core.display_functions import * +from IPython.core.display import ( + display_pretty, + display_html, + display_markdown, + display_svg, + display_png, + display_jpeg, + display_latex, + display_json, + display_javascript, + display_pdf, + DisplayObject, + TextDisplayObject, + Pretty, + HTML, + Markdown, + Math, + Latex, + SVG, + ProgressBar, + JSON, + GeoJSON, + Javascript, + Image, + set_matplotlib_formats, + set_matplotlib_close, + Video, +) +from IPython.lib.display import * diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/__init__.py b/myenv/lib/python3.10/site-packages/IPython/extensions/__init__.py new file mode 100644 index 000000000..db7f79fca --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/extensions/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +"""This directory is meant for IPython extensions.""" diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..f5a337bf6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/autoreload.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/autoreload.cpython-310.pyc new file mode 100644 index 000000000..bcf8af0f2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/autoreload.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/storemagic.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/storemagic.cpython-310.pyc new file mode 100644 index 000000000..27293f10d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/__pycache__/storemagic.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/autoreload.py b/myenv/lib/python3.10/site-packages/IPython/extensions/autoreload.py new file mode 100644 index 000000000..f485ac38b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/extensions/autoreload.py @@ -0,0 +1,631 @@ +"""IPython extension to reload modules before executing user code. + +``autoreload`` reloads modules automatically before entering the execution of +code typed at the IPython prompt. + +This makes for example the following workflow possible: + +.. sourcecode:: ipython + + In [1]: %load_ext autoreload + + In [2]: %autoreload 2 + + In [3]: from foo import some_function + + In [4]: some_function() + Out[4]: 42 + + In [5]: # open foo.py in an editor and change some_function to return 43 + + In [6]: some_function() + Out[6]: 43 + +The module was reloaded without reloading it explicitly, and the object +imported with ``from foo import ...`` was also updated. + +Usage +===== + +The following magic commands are provided: + +``%autoreload`` + + Reload all modules (except those excluded by ``%aimport``) + automatically now. + +``%autoreload 0`` + + Disable automatic reloading. 
+
+``%autoreload 1``
+
+    Reload all modules imported with ``%aimport`` every time before
+    executing the Python code typed.
+
+``%autoreload 2``
+
+    Reload all modules (except those excluded by ``%aimport``) every
+    time before executing the Python code typed.
+
+``%autoreload 3``
+
+    Reload all modules AND autoload newly added objects
+    every time before executing the Python code typed.
+
+``%aimport``
+
+    List modules which are to be automatically imported or not to be imported.
+
+``%aimport foo``
+
+    Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
+
+``%aimport foo, bar``
+
+    Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1``
+
+``%aimport -foo``
+
+    Mark module 'foo' to not be autoreloaded.
+
+Caveats
+=======
+
+Reloading Python modules in a reliable way is in general difficult,
+and unexpected things may occur. ``%autoreload`` tries to work around
+common pitfalls by replacing function code objects and parts of
+classes previously in the module with new versions. This makes the
+following things work:
+
+- Functions and classes imported via 'from xxx import foo' are upgraded
+  to new versions when 'xxx' is reloaded.
+
+- Methods and properties of classes are upgraded on reload, so that
+  calling 'c.foo()' on an object 'c' created before the reload causes
+  the new code for 'foo' to be executed.
+
+Some of the known remaining caveats are:
+
+- Replacing code objects does not always succeed: changing a @property
+  in a class to an ordinary method or a method to a member variable
+  can cause problems (but in old objects only).
+
+- Functions that are removed (eg. via monkey-patching) from a module
+  before it is reloaded are not upgraded.
+
+- C extension modules cannot be reloaded, and so cannot be autoreloaded.
+
+- When comparing Enum and Flag members, the 'is' identity operator is used,
+  even where '==' was written (similar to the 'None' keyword).
+
+- Reloading a module, or importing the same module by a different name,
+  creates new Enums. These may look the same, but are not.
+"""
+
+__skip_doctest__ = True
+
+# -----------------------------------------------------------------------------
+# Copyright (C) 2000 Thomas Heller
+# Copyright (C) 2008 Pauli Virtanen
+# Copyright (C) 2012 The IPython Development Team
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+# -----------------------------------------------------------------------------
+#
+# This IPython module is written by Pauli Virtanen, based on the autoreload
+# code by Thomas Heller.
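+#
+# A sketch of enabling the extension permanently from a profile's config file
+# (illustrative; assumes the default profile at
+# ~/.ipython/profile_default/ipython_config.py):
+#
+#   c.InteractiveShellApp.extensions = ["autoreload"]
+#   c.InteractiveShellApp.exec_lines = ["%autoreload 2"]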
+ +# ----------------------------------------------------------------------------- +# Imports +# ----------------------------------------------------------------------------- + +import os +import sys +import traceback +import types +import weakref +import gc +from importlib import import_module, reload +from importlib.util import source_from_cache + +# ------------------------------------------------------------------------------ +# Autoreload functionality +# ------------------------------------------------------------------------------ + + +class ModuleReloader: + enabled = False + """Whether this reloader is enabled""" + + check_all = True + """Autoreload all modules, not just those listed in 'modules'""" + + autoload_obj = False + """Autoreload all modules AND autoload all new objects""" + + def __init__(self, shell=None): + # Modules that failed to reload: {module: mtime-on-failed-reload, ...} + self.failed = {} + # Modules specially marked as autoreloadable. + self.modules = {} + # Modules specially marked as not autoreloadable. + self.skip_modules = {} + # (module-name, name) -> weakref, for replacing old code objects + self.old_objects = {} + # Module modification timestamps + self.modules_mtimes = {} + self.shell = shell + + # Cache module modification times + self.check(check_all=True, do_reload=False) + + def mark_module_skipped(self, module_name): + """Skip reloading the named module in the future""" + try: + del self.modules[module_name] + except KeyError: + pass + self.skip_modules[module_name] = True + + def mark_module_reloadable(self, module_name): + """Reload the named module in the future (if it is imported)""" + try: + del self.skip_modules[module_name] + except KeyError: + pass + self.modules[module_name] = True + + def aimport_module(self, module_name): + """Import a module, and mark it reloadable + + Returns + ------- + top_module : module + The imported module if it is top-level, or the top-level + top_name : module + Name of top_module + + """ + self.mark_module_reloadable(module_name) + + import_module(module_name) + top_name = module_name.split(".")[0] + top_module = sys.modules[top_name] + return top_module, top_name + + def filename_and_mtime(self, module): + if not hasattr(module, "__file__") or module.__file__ is None: + return None, None + + if getattr(module, "__name__", None) in [None, "__mp_main__", "__main__"]: + # we cannot reload(__main__) or reload(__mp_main__) + return None, None + + filename = module.__file__ + path, ext = os.path.splitext(filename) + + if ext.lower() == ".py": + py_filename = filename + else: + try: + py_filename = source_from_cache(filename) + except ValueError: + return None, None + + try: + pymtime = os.stat(py_filename).st_mtime + except OSError: + return None, None + + return py_filename, pymtime + + def check(self, check_all=False, do_reload=True): + """Check whether some modules need to be reloaded.""" + + if not self.enabled and not check_all: + return + + if check_all or self.check_all: + modules = list(sys.modules.keys()) + else: + modules = list(self.modules.keys()) + + for modname in modules: + m = sys.modules.get(modname, None) + + if modname in self.skip_modules: + continue + + py_filename, pymtime = self.filename_and_mtime(m) + if py_filename is None: + continue + + try: + if pymtime <= self.modules_mtimes[modname]: + continue + except KeyError: + self.modules_mtimes[modname] = pymtime + continue + else: + if self.failed.get(py_filename, None) == pymtime: + continue + + self.modules_mtimes[modname] = pymtime + + # If 
we've reached this point, we should try to reload the module + if do_reload: + try: + if self.autoload_obj: + superreload(m, reload, self.old_objects, self.shell) + else: + superreload(m, reload, self.old_objects) + if py_filename in self.failed: + del self.failed[py_filename] + except: + print( + "[autoreload of {} failed: {}]".format( + modname, traceback.format_exc(10) + ), + file=sys.stderr, + ) + self.failed[py_filename] = pymtime + + +# ------------------------------------------------------------------------------ +# superreload +# ------------------------------------------------------------------------------ + + +func_attrs = [ + "__code__", + "__defaults__", + "__doc__", + "__closure__", + "__globals__", + "__dict__", +] + + +def update_function(old, new): + """Upgrade the code object of a function""" + for name in func_attrs: + try: + setattr(old, name, getattr(new, name)) + except (AttributeError, TypeError): + pass + + +def update_instances(old, new): + """Use garbage collector to find all instances that refer to the old + class definition and update their __class__ to point to the new class + definition""" + + refs = gc.get_referrers(old) + + for ref in refs: + if type(ref) is old: + object.__setattr__(ref, "__class__", new) + + +def update_class(old, new): + """Replace stuff in the __dict__ of a class, and upgrade + method code objects, and add new methods, if any""" + for key in list(old.__dict__.keys()): + old_obj = getattr(old, key) + try: + new_obj = getattr(new, key) + # explicitly checking that comparison returns True to handle + # cases where `==` doesn't return a boolean. + if (old_obj == new_obj) is True: + continue + except AttributeError: + # obsolete attribute: remove it + try: + delattr(old, key) + except (AttributeError, TypeError): + pass + continue + except ValueError: + # can't compare nested structures containing + # numpy arrays using `==` + pass + + if update_generic(old_obj, new_obj): + continue + + try: + setattr(old, key, getattr(new, key)) + except (AttributeError, TypeError): + pass # skip non-writable attributes + + for key in list(new.__dict__.keys()): + if key not in list(old.__dict__.keys()): + try: + setattr(old, key, getattr(new, key)) + except (AttributeError, TypeError): + pass # skip non-writable attributes + + # update all instances of class + update_instances(old, new) + + +def update_property(old, new): + """Replace get/set/del functions of a property""" + update_generic(old.fdel, new.fdel) + update_generic(old.fget, new.fget) + update_generic(old.fset, new.fset) + + +def isinstance2(a, b, typ): + return isinstance(a, typ) and isinstance(b, typ) + + +UPDATE_RULES = [ + (lambda a, b: isinstance2(a, b, type), update_class), + (lambda a, b: isinstance2(a, b, types.FunctionType), update_function), + (lambda a, b: isinstance2(a, b, property), update_property), +] +UPDATE_RULES.extend( + [ + ( + lambda a, b: isinstance2(a, b, types.MethodType), + lambda a, b: update_function(a.__func__, b.__func__), + ), + ] +) + + +def update_generic(a, b): + for type_check, update in UPDATE_RULES: + if type_check(a, b): + update(a, b) + return True + return False + + +class StrongRef: + def __init__(self, obj): + self.obj = obj + + def __call__(self): + return self.obj + + +mod_attrs = [ + "__name__", + "__doc__", + "__package__", + "__loader__", + "__spec__", + "__file__", + "__cached__", + "__builtins__", +] + + +def append_obj(module, d, name, obj, autoload=False): + in_module = hasattr(obj, "__module__") and obj.__module__ == module.__name__ + if autoload: 
+        # check needed for module global built-ins
+        if not in_module and name in mod_attrs:
+            return False
+    else:
+        if not in_module:
+            return False
+
+    key = (module.__name__, name)
+    try:
+        d.setdefault(key, []).append(weakref.ref(obj))
+    except TypeError:
+        pass
+    return True
+
+
+def superreload(module, reload=reload, old_objects=None, shell=None):
+    """Enhanced version of the builtin reload function.
+
+    superreload remembers objects previously in the module, and
+
+    - upgrades the class dictionary of every old class in the module
+    - upgrades the code object of every old function and method
+    - clears the module's namespace before reloading
+
+    """
+    if old_objects is None:
+        old_objects = {}
+
+    # collect old objects in the module; append_obj already records a weakref
+    # to each object under its (module name, object name) key, so no second
+    # append is needed here
+    for name, obj in list(module.__dict__.items()):
+        if not append_obj(module, old_objects, name, obj):
+            continue
+
+    # reload module
+    try:
+        # clear namespace first from old cruft
+        old_dict = module.__dict__.copy()
+        old_name = module.__name__
+        module.__dict__.clear()
+        module.__dict__["__name__"] = old_name
+        module.__dict__["__loader__"] = old_dict["__loader__"]
+    except (TypeError, AttributeError, KeyError):
+        pass
+
+    try:
+        module = reload(module)
+    except:
+        # restore module dictionary on failed reload
+        module.__dict__.update(old_dict)
+        raise
+
+    # iterate over all objects and update functions & classes
+    for name, new_obj in list(module.__dict__.items()):
+        key = (module.__name__, name)
+        if key not in old_objects:
+            # here 'shell' acts both as a flag and as an output var
+            if (
+                shell is None
+                or name == "Enum"
+                or not append_obj(module, old_objects, name, new_obj, True)
+            ):
+                continue
+            shell.user_ns[name] = new_obj
+
+        new_refs = []
+        for old_ref in old_objects[key]:
+            old_obj = old_ref()
+            if old_obj is None:
+                continue
+            new_refs.append(old_ref)
+            update_generic(old_obj, new_obj)
+
+        if new_refs:
+            old_objects[key] = new_refs
+        else:
+            del old_objects[key]
+
+    return module
+
+
+# ------------------------------------------------------------------------------
+# IPython connectivity
+# ------------------------------------------------------------------------------
+
+from IPython.core.magic import Magics, magics_class, line_magic
+
+
+@magics_class
+class AutoreloadMagics(Magics):
+    def __init__(self, *a, **kw):
+        super().__init__(*a, **kw)
+        self._reloader = ModuleReloader(self.shell)
+        self._reloader.check_all = False
+        self._reloader.autoload_obj = False
+        self.loaded_modules = set(sys.modules)
+
+    @line_magic
+    def autoreload(self, parameter_s=""):
+        r"""%autoreload => Reload modules automatically
+
+        %autoreload
+        Reload all modules (except those excluded by %aimport) automatically
+        now.
+
+        %autoreload 0
+        Disable automatic reloading.
+
+        %autoreload 1
+        Reload all modules imported with %aimport every time before executing
+        the Python code typed.
+
+        %autoreload 2
+        Reload all modules (except those excluded by %aimport) every time
+        before executing the Python code typed.
+
+        %autoreload 3
+        Reload all modules as with 2, and additionally push objects that were
+        newly added to those modules into the user namespace.
+
+        Reloading Python modules in a reliable way is in general
+        difficult, and unexpected things may occur. %autoreload tries to
+        work around common pitfalls by replacing function code objects and
+        parts of classes previously in the module with new versions. This
+        makes the following things work:
+
+        - Functions and classes imported via 'from xxx import foo' are upgraded
+          to new versions when 'xxx' is reloaded.
+
+        - Methods and properties of classes are upgraded on reload, so that
+          calling 'c.foo()' on an object 'c' created before the reload causes
+          the new code for 'foo' to be executed.
+
+        Some of the known remaining caveats are:
+
+        - Replacing code objects does not always succeed: changing a @property
+          in a class to an ordinary method or a method to a member variable
+          can cause problems (but in old objects only).
+
+        - Functions that are removed (e.g. via monkey-patching) from a module
+          before it is reloaded are not upgraded.
+
+        - C extension modules cannot be reloaded, and so cannot be
+          autoreloaded.
+
+        """
+        if parameter_s == "":
+            self._reloader.check(True)
+        elif parameter_s == "0":
+            self._reloader.enabled = False
+        elif parameter_s == "1":
+            self._reloader.check_all = False
+            self._reloader.enabled = True
+        elif parameter_s == "2":
+            self._reloader.check_all = True
+            self._reloader.enabled = True
+        elif parameter_s == "3":
+            self._reloader.check_all = True
+            self._reloader.enabled = True
+            self._reloader.autoload_obj = True
+
+    @line_magic
+    def aimport(self, parameter_s="", stream=None):
+        """%aimport => Import modules for automatic reloading.
+
+        %aimport
+        List modules to automatically import and not to import.
+
+        %aimport foo
+        Import module 'foo' and mark it to be autoreloaded for %autoreload 1
+
+        %aimport foo, bar
+        Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload 1
+
+        %aimport -foo
+        Mark module 'foo' to not be autoreloaded for %autoreload 1
+        """
+        modname = parameter_s
+        if not modname:
+            to_reload = sorted(self._reloader.modules.keys())
+            to_skip = sorted(self._reloader.skip_modules.keys())
+            if stream is None:
+                stream = sys.stdout
+            if self._reloader.check_all:
+                stream.write("Modules to reload:\nall-except-skipped\n")
+            else:
+                stream.write("Modules to reload:\n%s\n" % " ".join(to_reload))
+            stream.write("\nModules to skip:\n%s\n" % " ".join(to_skip))
+        elif modname.startswith("-"):
+            modname = modname[1:]
+            self._reloader.mark_module_skipped(modname)
+        else:
+            for _module in [_.strip() for _ in modname.split(",")]:
+                top_module, top_name = self._reloader.aimport_module(_module)
+
+                # Inject module to user namespace
+                self.shell.push({top_name: top_module})
+
+    def pre_run_cell(self):
+        if self._reloader.enabled:
+            try:
+                self._reloader.check()
+            except:
+                pass
+
+    def post_execute_hook(self):
+        """Cache the modification times of any modules imported in this execution"""
+        newly_loaded_modules = set(sys.modules) - self.loaded_modules
+        for modname in newly_loaded_modules:
+            _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
+            if pymtime is not None:
+                self._reloader.modules_mtimes[modname] = pymtime
+
+        self.loaded_modules.update(newly_loaded_modules)
+
+
+def load_ipython_extension(ip):
+    """Load the extension in IPython."""
+    auto_reload = AutoreloadMagics(ip)
+    ip.register_magics(auto_reload)
+    ip.events.register("pre_run_cell", auto_reload.pre_run_cell)
+    ip.events.register("post_execute", auto_reload.post_execute_hook)
diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/storemagic.py b/myenv/lib/python3.10/site-packages/IPython/extensions/storemagic.py
new file mode 100644
index 000000000..d9d00f14b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/extensions/storemagic.py
@@ -0,0 +1,236 @@
+# -*- coding: utf-8 -*-
+"""
+%store magic for lightweight persistence.
+
+Stores variables, aliases and macros in IPython's database.
+ +To automatically restore stored variables at startup, add this to your +:file:`ipython_config.py` file:: + + c.StoreMagics.autorestore = True +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import inspect, os, sys, textwrap + +from IPython.core.error import UsageError +from IPython.core.magic import Magics, magics_class, line_magic +from IPython.testing.skipdoctest import skip_doctest +from traitlets import Bool + + +def restore_aliases(ip, alias=None): + staliases = ip.db.get('stored_aliases', {}) + if alias is None: + for k,v in staliases.items(): + #print "restore alias",k,v # dbg + #self.alias_table[k] = v + ip.alias_manager.define_alias(k,v) + else: + ip.alias_manager.define_alias(alias, staliases[alias]) + + +def refresh_variables(ip): + db = ip.db + for key in db.keys('autorestore/*'): + # strip autorestore + justkey = os.path.basename(key) + try: + obj = db[key] + except KeyError: + print("Unable to restore variable '%s', ignoring (use %%store -d to forget!)" % justkey) + print("The error was:", sys.exc_info()[0]) + else: + #print "restored",justkey,"=",obj #dbg + ip.user_ns[justkey] = obj + + +def restore_dhist(ip): + ip.user_ns['_dh'] = ip.db.get('dhist',[]) + + +def restore_data(ip): + refresh_variables(ip) + restore_aliases(ip) + restore_dhist(ip) + + +@magics_class +class StoreMagics(Magics): + """Lightweight persistence for python variables. + + Provides the %store magic.""" + + autorestore = Bool(False, help= + """If True, any %store-d variables will be automatically restored + when IPython starts. + """ + ).tag(config=True) + + def __init__(self, shell): + super(StoreMagics, self).__init__(shell=shell) + self.shell.configurables.append(self) + if self.autorestore: + restore_data(self.shell) + + @skip_doctest + @line_magic + def store(self, parameter_s=''): + """Lightweight persistence for python variables. + + Example:: + + In [1]: l = ['hello',10,'world'] + In [2]: %store l + Stored 'l' (list) + In [3]: exit + + (IPython session is closed and started again...) + + ville@badger:~$ ipython + In [1]: l + NameError: name 'l' is not defined + In [2]: %store -r + In [3]: l + Out[3]: ['hello', 10, 'world'] + + Usage: + + * ``%store`` - Show list of all variables and their current + values + * ``%store spam bar`` - Store the *current* value of the variables spam + and bar to disk + * ``%store -d spam`` - Remove the variable and its value from storage + * ``%store -z`` - Remove all variables from storage + * ``%store -r`` - Refresh all variables, aliases and directory history + from store (overwrite current vals) + * ``%store -r spam bar`` - Refresh specified variables and aliases from store + (delete current val) + * ``%store foo >a.txt`` - Store value of foo to new file a.txt + * ``%store foo >>a.txt`` - Append value of foo to file a.txt + + It should be noted that if you change the value of a variable, you + need to %store it again if you want to persist the new value. + + Note also that the variables will need to be pickleable; most basic + python types can be safely %store'd. + + Also aliases can be %store'd across sessions. + To remove an alias from the storage, use the %unalias magic. 
+ """ + + opts,argsl = self.parse_options(parameter_s,'drz',mode='string') + args = argsl.split() + ip = self.shell + db = ip.db + # delete + if 'd' in opts: + try: + todel = args[0] + except IndexError as e: + raise UsageError('You must provide the variable to forget') from e + else: + try: + del db['autorestore/' + todel] + except BaseException as e: + raise UsageError("Can't delete variable '%s'" % todel) from e + # reset + elif 'z' in opts: + for k in db.keys('autorestore/*'): + del db[k] + + elif 'r' in opts: + if args: + for arg in args: + try: + obj = db['autorestore/' + arg] + except KeyError: + try: + restore_aliases(ip, alias=arg) + except KeyError: + print("no stored variable or alias %s" % arg) + else: + ip.user_ns[arg] = obj + else: + restore_data(ip) + + # run without arguments -> list variables & values + elif not args: + vars = db.keys('autorestore/*') + vars.sort() + if vars: + size = max(map(len, vars)) + else: + size = 0 + + print('Stored variables and their in-db values:') + fmt = '%-'+str(size)+'s -> %s' + get = db.get + for var in vars: + justkey = os.path.basename(var) + # print 30 first characters from every var + print(fmt % (justkey, repr(get(var, ''))[:50])) + + # default action - store the variable + else: + # %store foo >file.txt or >>file.txt + if len(args) > 1 and args[1].startswith(">"): + fnam = os.path.expanduser(args[1].lstrip(">").lstrip()) + if args[1].startswith(">>"): + fil = open(fnam, "a", encoding="utf-8") + else: + fil = open(fnam, "w", encoding="utf-8") + with fil: + obj = ip.ev(args[0]) + print("Writing '%s' (%s) to file '%s'." % (args[0], + obj.__class__.__name__, fnam)) + + if not isinstance (obj, str): + from pprint import pprint + pprint(obj, fil) + else: + fil.write(obj) + if not obj.endswith('\n'): + fil.write('\n') + + return + + # %store foo + for arg in args: + try: + obj = ip.user_ns[arg] + except KeyError: + # it might be an alias + name = arg + try: + cmd = ip.alias_manager.retrieve_alias(name) + except ValueError as e: + raise UsageError("Unknown variable '%s'" % name) from e + + staliases = db.get('stored_aliases',{}) + staliases[name] = cmd + db['stored_aliases'] = staliases + print("Alias stored: %s (%s)" % (name, cmd)) + return + + else: + modname = getattr(inspect.getmodule(obj), '__name__', '') + if modname == '__main__': + print(textwrap.dedent("""\ + Warning:%s is %s + Proper storage of interactively declared classes (or instances + of those classes) is not possible! Only instances + of classes in real modules on file system can be %%store'd. 
+ """ % (arg, obj) )) + return + #pickled = pickle.dumps(obj) + db[ 'autorestore/' + arg ] = obj + print("Stored '%s' (%s)" % (arg, obj.__class__.__name__)) + + +def load_ipython_extension(ip): + """Load the extension in IPython.""" + ip.register_magics(StoreMagics) + diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__init__.py b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..89f945d65 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_autoreload.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_autoreload.cpython-310.pyc new file mode 100644 index 000000000..d666db084 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_autoreload.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_storemagic.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_storemagic.cpython-310.pyc new file mode 100644 index 000000000..3f024de17 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/__pycache__/test_storemagic.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_autoreload.py b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_autoreload.py new file mode 100644 index 000000000..2c3c9db21 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_autoreload.py @@ -0,0 +1,598 @@ +"""Tests for autoreload extension. +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2012 IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+# ----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- +# Imports +# ----------------------------------------------------------------------------- + +import os +import platform +import pytest +import sys +import tempfile +import textwrap +import shutil +import random +import time +from io import StringIO + +import IPython.testing.tools as tt + +from unittest import TestCase + +from IPython.extensions.autoreload import AutoreloadMagics +from IPython.core.events import EventManager, pre_run_cell +from IPython.testing.decorators import skipif_not_numpy + +if platform.python_implementation() == "PyPy": + pytest.skip( + "Current autoreload implementation is extremely slow on PyPy", + allow_module_level=True, + ) + +# ----------------------------------------------------------------------------- +# Test fixture +# ----------------------------------------------------------------------------- + +noop = lambda *a, **kw: None + + +class FakeShell: + def __init__(self): + self.ns = {} + self.user_ns = self.ns + self.user_ns_hidden = {} + self.events = EventManager(self, {"pre_run_cell", pre_run_cell}) + self.auto_magics = AutoreloadMagics(shell=self) + self.events.register("pre_run_cell", self.auto_magics.pre_run_cell) + + register_magics = set_hook = noop + + def run_code(self, code): + self.events.trigger("pre_run_cell") + exec(code, self.user_ns) + self.auto_magics.post_execute_hook() + + def push(self, items): + self.ns.update(items) + + def magic_autoreload(self, parameter): + self.auto_magics.autoreload(parameter) + + def magic_aimport(self, parameter, stream=None): + self.auto_magics.aimport(parameter, stream=stream) + self.auto_magics.post_execute_hook() + + +class Fixture(TestCase): + """Fixture for creating test module files""" + + test_dir = None + old_sys_path = None + filename_chars = "abcdefghijklmopqrstuvwxyz0123456789" + + def setUp(self): + self.test_dir = tempfile.mkdtemp() + self.old_sys_path = list(sys.path) + sys.path.insert(0, self.test_dir) + self.shell = FakeShell() + + def tearDown(self): + shutil.rmtree(self.test_dir) + sys.path = self.old_sys_path + + self.test_dir = None + self.old_sys_path = None + self.shell = None + + def get_module(self): + module_name = "tmpmod_" + "".join(random.sample(self.filename_chars, 20)) + if module_name in sys.modules: + del sys.modules[module_name] + file_name = os.path.join(self.test_dir, module_name + ".py") + return module_name, file_name + + def write_file(self, filename, content): + """ + Write a file, and force a timestamp difference of at least one second + + Notes + ----- + Python's .pyc files record the timestamp of their compilation + with a time resolution of one second. + + Therefore, we need to force a timestamp difference between .py + and .pyc, without having the .py file be timestamped in the + future, and without changing the timestamp of the .pyc file + (because that is stored in the file). The only reliable way + to achieve this seems to be to sleep. 
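+
+        (Hence the ``time.sleep(1.05)`` in the body below: one second of
+        timestamp resolution plus a small epsilon.)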
+ """ + content = textwrap.dedent(content) + # Sleep one second + eps + time.sleep(1.05) + + # Write + with open(filename, "w", encoding="utf-8") as f: + f.write(content) + + def new_module(self, code): + code = textwrap.dedent(code) + mod_name, mod_fn = self.get_module() + with open(mod_fn, "w", encoding="utf-8") as f: + f.write(code) + return mod_name, mod_fn + + +# ----------------------------------------------------------------------------- +# Test automatic reloading +# ----------------------------------------------------------------------------- + + +def pickle_get_current_class(obj): + """ + Original issue comes from pickle; hence the name. + """ + name = obj.__class__.__name__ + module_name = getattr(obj, "__module__", None) + obj2 = sys.modules[module_name] + for subpath in name.split("."): + obj2 = getattr(obj2, subpath) + return obj2 + + +class TestAutoreload(Fixture): + def test_reload_enums(self): + mod_name, mod_fn = self.new_module( + textwrap.dedent( + """ + from enum import Enum + class MyEnum(Enum): + A = 'A' + B = 'B' + """ + ) + ) + self.shell.magic_autoreload("2") + self.shell.magic_aimport(mod_name) + self.write_file( + mod_fn, + textwrap.dedent( + """ + from enum import Enum + class MyEnum(Enum): + A = 'A' + B = 'B' + C = 'C' + """ + ), + ) + with tt.AssertNotPrints( + ("[autoreload of %s failed:" % mod_name), channel="stderr" + ): + self.shell.run_code("pass") # trigger another reload + + def test_reload_class_type(self): + self.shell.magic_autoreload("2") + mod_name, mod_fn = self.new_module( + """ + class Test(): + def meth(self): + return "old" + """ + ) + assert "test" not in self.shell.ns + assert "result" not in self.shell.ns + + self.shell.run_code("from %s import Test" % mod_name) + self.shell.run_code("test = Test()") + + self.write_file( + mod_fn, + """ + class Test(): + def meth(self): + return "new" + """, + ) + + test_object = self.shell.ns["test"] + + # important to trigger autoreload logic ! + self.shell.run_code("pass") + + test_class = pickle_get_current_class(test_object) + assert isinstance(test_object, test_class) + + # extra check. 
+ self.shell.run_code("import pickle") + self.shell.run_code("p = pickle.dumps(test)") + + def test_reload_class_attributes(self): + self.shell.magic_autoreload("2") + mod_name, mod_fn = self.new_module( + textwrap.dedent( + """ + class MyClass: + + def __init__(self, a=10): + self.a = a + self.b = 22 + # self.toto = 33 + + def square(self): + print('compute square') + return self.a*self.a + """ + ) + ) + self.shell.run_code("from %s import MyClass" % mod_name) + self.shell.run_code("first = MyClass(5)") + self.shell.run_code("first.square()") + with self.assertRaises(AttributeError): + self.shell.run_code("first.cube()") + with self.assertRaises(AttributeError): + self.shell.run_code("first.power(5)") + self.shell.run_code("first.b") + with self.assertRaises(AttributeError): + self.shell.run_code("first.toto") + + # remove square, add power + + self.write_file( + mod_fn, + textwrap.dedent( + """ + class MyClass: + + def __init__(self, a=10): + self.a = a + self.b = 11 + + def power(self, p): + print('compute power '+str(p)) + return self.a**p + """ + ), + ) + + self.shell.run_code("second = MyClass(5)") + + for object_name in {"first", "second"}: + self.shell.run_code(f"{object_name}.power(5)") + with self.assertRaises(AttributeError): + self.shell.run_code(f"{object_name}.cube()") + with self.assertRaises(AttributeError): + self.shell.run_code(f"{object_name}.square()") + self.shell.run_code(f"{object_name}.b") + self.shell.run_code(f"{object_name}.a") + with self.assertRaises(AttributeError): + self.shell.run_code(f"{object_name}.toto") + + @skipif_not_numpy + def test_comparing_numpy_structures(self): + self.shell.magic_autoreload("2") + mod_name, mod_fn = self.new_module( + textwrap.dedent( + """ + import numpy as np + class MyClass: + a = (np.array((.1, .2)), + np.array((.2, .3))) + """ + ) + ) + self.shell.run_code("from %s import MyClass" % mod_name) + self.shell.run_code("first = MyClass()") + + # change property `a` + self.write_file( + mod_fn, + textwrap.dedent( + """ + import numpy as np + class MyClass: + a = (np.array((.3, .4)), + np.array((.5, .6))) + """ + ), + ) + + with tt.AssertNotPrints( + ("[autoreload of %s failed:" % mod_name), channel="stderr" + ): + self.shell.run_code("pass") # trigger another reload + + def test_autoload_newly_added_objects(self): + self.shell.magic_autoreload("3") + mod_code = """ + def func1(): pass + """ + mod_name, mod_fn = self.new_module(textwrap.dedent(mod_code)) + self.shell.run_code(f"from {mod_name} import *") + self.shell.run_code("func1()") + with self.assertRaises(NameError): + self.shell.run_code("func2()") + with self.assertRaises(NameError): + self.shell.run_code("t = Test()") + with self.assertRaises(NameError): + self.shell.run_code("number") + + # ----------- TEST NEW OBJ LOADED -------------------------- + + new_code = """ + def func1(): pass + def func2(): pass + class Test: pass + number = 0 + from enum import Enum + class TestEnum(Enum): + A = 'a' + """ + self.write_file(mod_fn, textwrap.dedent(new_code)) + + # test function now exists in shell's namespace namespace + self.shell.run_code("func2()") + # test function now exists in module's dict + self.shell.run_code(f"import sys; sys.modules['{mod_name}'].func2()") + # test class now exists + self.shell.run_code("t = Test()") + # test global built-in var now exists + self.shell.run_code("number") + # test the enumerations gets loaded successfully + self.shell.run_code("TestEnum.A") + + # ----------- TEST NEW OBJ CAN BE CHANGED -------------------- + + new_code = """ + def 
func1(): return 'changed' + def func2(): return 'changed' + class Test: + def new_func(self): + return 'changed' + number = 1 + from enum import Enum + class TestEnum(Enum): + A = 'a' + B = 'added' + """ + self.write_file(mod_fn, textwrap.dedent(new_code)) + self.shell.run_code("assert func1() == 'changed'") + self.shell.run_code("assert func2() == 'changed'") + self.shell.run_code("t = Test(); assert t.new_func() == 'changed'") + self.shell.run_code("assert number == 1") + if sys.version_info < (3, 12): + self.shell.run_code("assert TestEnum.B.value == 'added'") + + # ----------- TEST IMPORT FROM MODULE -------------------------- + + new_mod_code = """ + from enum import Enum + class Ext(Enum): + A = 'ext' + def ext_func(): + return 'ext' + class ExtTest: + def meth(self): + return 'ext' + ext_int = 2 + """ + new_mod_name, new_mod_fn = self.new_module(textwrap.dedent(new_mod_code)) + current_mod_code = f""" + from {new_mod_name} import * + """ + self.write_file(mod_fn, textwrap.dedent(current_mod_code)) + self.shell.run_code("assert Ext.A.value == 'ext'") + self.shell.run_code("assert ext_func() == 'ext'") + self.shell.run_code("t = ExtTest(); assert t.meth() == 'ext'") + self.shell.run_code("assert ext_int == 2") + + def _check_smoketest(self, use_aimport=True): + """ + Functional test for the automatic reloader using either + '%autoreload 1' or '%autoreload 2' + """ + + mod_name, mod_fn = self.new_module( + """ +x = 9 + +z = 123 # this item will be deleted + +def foo(y): + return y + 3 + +class Baz(object): + def __init__(self, x): + self.x = x + def bar(self, y): + return self.x + y + @property + def quux(self): + return 42 + def zzz(self): + '''This method will be deleted below''' + return 99 + +class Bar: # old-style class: weakref doesn't work for it on Python < 2.7 + def foo(self): + return 1 +""" + ) + + # + # Import module, and mark for reloading + # + if use_aimport: + self.shell.magic_autoreload("1") + self.shell.magic_aimport(mod_name) + stream = StringIO() + self.shell.magic_aimport("", stream=stream) + self.assertIn(("Modules to reload:\n%s" % mod_name), stream.getvalue()) + + with self.assertRaises(ImportError): + self.shell.magic_aimport("tmpmod_as318989e89ds") + else: + self.shell.magic_autoreload("2") + self.shell.run_code("import %s" % mod_name) + stream = StringIO() + self.shell.magic_aimport("", stream=stream) + self.assertTrue( + "Modules to reload:\nall-except-skipped" in stream.getvalue() + ) + self.assertIn(mod_name, self.shell.ns) + + mod = sys.modules[mod_name] + + # + # Test module contents + # + old_foo = mod.foo + old_obj = mod.Baz(9) + old_obj2 = mod.Bar() + + def check_module_contents(): + self.assertEqual(mod.x, 9) + self.assertEqual(mod.z, 123) + + self.assertEqual(old_foo(0), 3) + self.assertEqual(mod.foo(0), 3) + + obj = mod.Baz(9) + self.assertEqual(old_obj.bar(1), 10) + self.assertEqual(obj.bar(1), 10) + self.assertEqual(obj.quux, 42) + self.assertEqual(obj.zzz(), 99) + + obj2 = mod.Bar() + self.assertEqual(old_obj2.foo(), 1) + self.assertEqual(obj2.foo(), 1) + + check_module_contents() + + # + # Simulate a failed reload: no reload should occur and exactly + # one error message should be printed + # + self.write_file( + mod_fn, + """ +a syntax error +""", + ) + + with tt.AssertPrints( + ("[autoreload of %s failed:" % mod_name), channel="stderr" + ): + self.shell.run_code("pass") # trigger reload + with tt.AssertNotPrints( + ("[autoreload of %s failed:" % mod_name), channel="stderr" + ): + self.shell.run_code("pass") # trigger another reload + 
check_module_contents() + + # + # Rewrite module (this time reload should succeed) + # + self.write_file( + mod_fn, + """ +x = 10 + +def foo(y): + return y + 4 + +class Baz(object): + def __init__(self, x): + self.x = x + def bar(self, y): + return self.x + y + 1 + @property + def quux(self): + return 43 + +class Bar: # old-style class + def foo(self): + return 2 +""", + ) + + def check_module_contents(): + self.assertEqual(mod.x, 10) + self.assertFalse(hasattr(mod, "z")) + + self.assertEqual(old_foo(0), 4) # superreload magic! + self.assertEqual(mod.foo(0), 4) + + obj = mod.Baz(9) + self.assertEqual(old_obj.bar(1), 11) # superreload magic! + self.assertEqual(obj.bar(1), 11) + + self.assertEqual(old_obj.quux, 43) + self.assertEqual(obj.quux, 43) + + self.assertFalse(hasattr(old_obj, "zzz")) + self.assertFalse(hasattr(obj, "zzz")) + + obj2 = mod.Bar() + self.assertEqual(old_obj2.foo(), 2) + self.assertEqual(obj2.foo(), 2) + + self.shell.run_code("pass") # trigger reload + check_module_contents() + + # + # Another failure case: deleted file (shouldn't reload) + # + os.unlink(mod_fn) + + self.shell.run_code("pass") # trigger reload + check_module_contents() + + # + # Disable autoreload and rewrite module: no reload should occur + # + if use_aimport: + self.shell.magic_aimport("-" + mod_name) + stream = StringIO() + self.shell.magic_aimport("", stream=stream) + self.assertTrue(("Modules to skip:\n%s" % mod_name) in stream.getvalue()) + + # This should succeed, although no such module exists + self.shell.magic_aimport("-tmpmod_as318989e89ds") + else: + self.shell.magic_autoreload("0") + + self.write_file( + mod_fn, + """ +x = -99 +""", + ) + + self.shell.run_code("pass") # trigger reload + self.shell.run_code("pass") + check_module_contents() + + # + # Re-enable autoreload: reload should now occur + # + if use_aimport: + self.shell.magic_aimport(mod_name) + else: + self.shell.magic_autoreload("") + + self.shell.run_code("pass") # trigger reload + self.assertEqual(mod.x, -99) + + def test_smoketest_aimport(self): + self._check_smoketest(use_aimport=True) + + def test_smoketest_autoreload(self): + self._check_smoketest(use_aimport=False) diff --git a/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_storemagic.py b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_storemagic.py new file mode 100644 index 000000000..3ac306bcd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/extensions/tests/test_storemagic.py @@ -0,0 +1,66 @@ +import tempfile, os +from pathlib import Path + +from traitlets.config.loader import Config + + +def setup_module(): + ip.magic('load_ext storemagic') + +def test_store_restore(): + assert 'bar' not in ip.user_ns, "Error: some other test leaked `bar` in user_ns" + assert 'foo' not in ip.user_ns, "Error: some other test leaked `foo` in user_ns" + assert 'foobar' not in ip.user_ns, "Error: some other test leaked `foobar` in user_ns" + assert 'foobaz' not in ip.user_ns, "Error: some other test leaked `foobaz` in user_ns" + ip.user_ns['foo'] = 78 + ip.magic('alias bar echo "hello"') + ip.user_ns['foobar'] = 79 + ip.user_ns['foobaz'] = '80' + tmpd = tempfile.mkdtemp() + ip.magic('cd ' + tmpd) + ip.magic('store foo') + ip.magic('store bar') + ip.magic('store foobar foobaz') + + # Check storing + assert ip.db["autorestore/foo"] == 78 + assert "bar" in ip.db["stored_aliases"] + assert ip.db["autorestore/foobar"] == 79 + assert ip.db["autorestore/foobaz"] == "80" + + # Remove those items + ip.user_ns.pop('foo', None) + 
ip.user_ns.pop('foobar', None) + ip.user_ns.pop('foobaz', None) + ip.alias_manager.undefine_alias('bar') + ip.magic('cd -') + ip.user_ns['_dh'][:] = [] + + # Check restoring + ip.magic("store -r foo bar foobar foobaz") + assert ip.user_ns["foo"] == 78 + assert ip.alias_manager.is_alias("bar") + assert ip.user_ns["foobar"] == 79 + assert ip.user_ns["foobaz"] == "80" + + ip.magic("store -r") # restores _dh too + assert any(Path(tmpd).samefile(p) for p in ip.user_ns["_dh"]) + + os.rmdir(tmpd) + +def test_autorestore(): + ip.user_ns['foo'] = 95 + ip.magic('store foo') + del ip.user_ns['foo'] + c = Config() + c.StoreMagics.autorestore = False + orig_config = ip.config + try: + ip.config = c + ip.extension_manager.reload_extension("storemagic") + assert "foo" not in ip.user_ns + c.StoreMagics.autorestore = True + ip.extension_manager.reload_extension("storemagic") + assert ip.user_ns["foo"] == 95 + finally: + ip.config = orig_config diff --git a/myenv/lib/python3.10/site-packages/IPython/external/__init__.py b/myenv/lib/python3.10/site-packages/IPython/external/__init__.py new file mode 100644 index 000000000..eedc338eb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/external/__init__.py @@ -0,0 +1,7 @@ +""" +This package contains all third-party modules bundled with IPython. +""" + +from typing import List + +__all__: List[str] = [] diff --git a/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..ef991e43c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_for_kernel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_for_kernel.cpython-310.pyc new file mode 100644 index 000000000..93ed55b70 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_for_kernel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_loaders.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_loaders.cpython-310.pyc new file mode 100644 index 000000000..aa44b395e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/external/__pycache__/qt_loaders.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/external/qt_for_kernel.py b/myenv/lib/python3.10/site-packages/IPython/external/qt_for_kernel.py new file mode 100644 index 000000000..b3168f6e2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/external/qt_for_kernel.py @@ -0,0 +1,128 @@ +""" Import Qt in a manner suitable for an IPython kernel. + +This is the import used for the `gui=qt` or `matplotlib=qt` initialization. + +Import Priority: + +if Qt has been imported anywhere else: + use that + +if matplotlib has been imported and doesn't support v2 (<= 1.0.1): + use PyQt4 @v1 + +Next, ask QT_API env variable + +if QT_API not set: + ask matplotlib what it's using. If Qt4Agg or Qt5Agg, then use the + version matplotlib is configured with + + else: (matplotlib said nothing) + # this is the default path - nobody told us anything + try in this order: + PyQt default version, PySide, PyQt5 +else: + use what QT_API says + +""" +# NOTE: This is no longer an external, third-party module, and should be +# considered part of IPython. 
For compatibility however, it is being kept in +# IPython/external. + +import os +import sys + +from IPython.external.qt_loaders import ( + load_qt, + loaded_api, + enum_factory, + # QT6 + QT_API_PYQT6, + QT_API_PYSIDE6, + # QT5 + QT_API_PYQT5, + QT_API_PYSIDE2, + # QT4 + QT_API_PYQTv1, + QT_API_PYQT, + QT_API_PYSIDE, + # default + QT_API_PYQT_DEFAULT, +) + +_qt_apis = ( + # QT6 + QT_API_PYQT6, + QT_API_PYSIDE6, + # QT5 + QT_API_PYQT5, + QT_API_PYSIDE2, + # QT4 + QT_API_PYQTv1, + QT_API_PYQT, + QT_API_PYSIDE, + # default + QT_API_PYQT_DEFAULT, +) + + +def matplotlib_options(mpl): + """Constraints placed on an imported matplotlib.""" + if mpl is None: + return + backend = mpl.rcParams.get('backend', None) + if backend == 'Qt4Agg': + mpqt = mpl.rcParams.get('backend.qt4', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyside': + return [QT_API_PYSIDE] + elif mpqt.lower() == 'pyqt4': + return [QT_API_PYQT_DEFAULT] + elif mpqt.lower() == 'pyqt4v2': + return [QT_API_PYQT] + raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" % + mpqt) + elif backend == 'Qt5Agg': + mpqt = mpl.rcParams.get('backend.qt5', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyqt5': + return [QT_API_PYQT5] + raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" % + mpqt) + +def get_options(): + """Return a list of acceptable QT APIs, in decreasing order of preference.""" + #already imported Qt somewhere. Use that + loaded = loaded_api() + if loaded is not None: + return [loaded] + + mpl = sys.modules.get('matplotlib', None) + + if mpl is not None and tuple(mpl.__version__.split(".")) < ("1", "0", "2"): + # 1.0.1 only supports PyQt4 v1 + return [QT_API_PYQT_DEFAULT] + + qt_api = os.environ.get('QT_API', None) + if qt_api is None: + #no ETS variable. Ask mpl, then use default fallback path + return matplotlib_options(mpl) or [ + QT_API_PYQT_DEFAULT, + QT_API_PYQT6, + QT_API_PYSIDE6, + QT_API_PYQT5, + QT_API_PYSIDE2, + QT_API_PYQT, + QT_API_PYSIDE, + ] + elif qt_api not in _qt_apis: + raise RuntimeError("Invalid Qt API %r, valid values are: %r" % + (qt_api, ', '.join(_qt_apis))) + else: + return [qt_api] + + +api_opts = get_options() +QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) +enum_helper = enum_factory(QT_API, QtCore) diff --git a/myenv/lib/python3.10/site-packages/IPython/external/qt_loaders.py b/myenv/lib/python3.10/site-packages/IPython/external/qt_loaders.py new file mode 100644 index 000000000..39ea29846 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/external/qt_loaders.py @@ -0,0 +1,399 @@ +""" +This module contains factory functions that attempt +to return Qt submodules from the various python Qt bindings. + +It also protects against double-importing Qt with different +bindings, which is unstable and likely to crash + +This is used primarily by qt and qt_for_kernel, and shouldn't +be accessed directly from the outside +""" +import importlib.abc +import sys +import types +from functools import partial, lru_cache +import operator + +# ### Available APIs. +# Qt6 +QT_API_PYQT6 = "pyqt6" +QT_API_PYSIDE6 = "pyside6" + +# Qt5 +QT_API_PYQT5 = 'pyqt5' +QT_API_PYSIDE2 = 'pyside2' + +# Qt4 +QT_API_PYQT = "pyqt" # Force version 2 +QT_API_PYQTv1 = "pyqtv1" # Force version 2 +QT_API_PYSIDE = "pyside" + +QT_API_PYQT_DEFAULT = "pyqtdefault" # use system default for version 1 vs. 
2 + +api_to_module = { + # Qt6 + QT_API_PYQT6: "PyQt6", + QT_API_PYSIDE6: "PySide6", + # Qt5 + QT_API_PYQT5: "PyQt5", + QT_API_PYSIDE2: "PySide2", + # Qt4 + QT_API_PYSIDE: "PySide", + QT_API_PYQT: "PyQt4", + QT_API_PYQTv1: "PyQt4", + # default + QT_API_PYQT_DEFAULT: "PyQt6", +} + + +class ImportDenier(importlib.abc.MetaPathFinder): + """Import Hook that will guard against bad Qt imports + once IPython commits to a specific binding + """ + + def __init__(self): + self.__forbidden = set() + + def forbid(self, module_name): + sys.modules.pop(module_name, None) + self.__forbidden.add(module_name) + + def find_spec(self, fullname, path, target=None): + if path: + return + if fullname in self.__forbidden: + raise ImportError( + """ + Importing %s disabled by IPython, which has + already imported an Incompatible QT Binding: %s + """ + % (fullname, loaded_api()) + ) + + +ID = ImportDenier() +sys.meta_path.insert(0, ID) + + +def commit_api(api): + """Commit to a particular API, and trigger ImportErrors on subsequent + dangerous imports""" + modules = set(api_to_module.values()) + + modules.remove(api_to_module[api]) + for mod in modules: + ID.forbid(mod) + + +def loaded_api(): + """Return which API is loaded, if any + + If this returns anything besides None, + importing any other Qt binding is unsafe. + + Returns + ------- + None, 'pyside6', 'pyqt6', 'pyside2', 'pyside', 'pyqt', 'pyqt5', 'pyqtv1' + """ + if sys.modules.get("PyQt6.QtCore"): + return QT_API_PYQT6 + elif sys.modules.get("PySide6.QtCore"): + return QT_API_PYSIDE6 + elif sys.modules.get("PyQt5.QtCore"): + return QT_API_PYQT5 + elif sys.modules.get("PySide2.QtCore"): + return QT_API_PYSIDE2 + elif sys.modules.get("PyQt4.QtCore"): + if qtapi_version() == 2: + return QT_API_PYQT + else: + return QT_API_PYQTv1 + elif sys.modules.get("PySide.QtCore"): + return QT_API_PYSIDE + + return None + + +def has_binding(api): + """Safely check for PyQt4/5, PySide or PySide2, without importing submodules + + Parameters + ---------- + api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault'] + Which module to check for + + Returns + ------- + True if the relevant module appears to be importable + """ + module_name = api_to_module[api] + from importlib.util import find_spec + + required = ['QtCore', 'QtGui', 'QtSvg'] + if api in (QT_API_PYQT5, QT_API_PYSIDE2, QT_API_PYQT6, QT_API_PYSIDE6): + # QT5 requires QtWidgets too + required.append('QtWidgets') + + for submod in required: + try: + spec = find_spec('%s.%s' % (module_name, submod)) + except ImportError: + # Package (e.g. PyQt5) not found + return False + else: + if spec is None: + # Submodule (e.g. 
PyQt5.QtCore) not found + return False + + if api == QT_API_PYSIDE: + # We can also safely check PySide version + import PySide + + return PySide.__version_info__ >= (1, 0, 3) + + return True + + +def qtapi_version(): + """Return which QString API has been set, if any + + Returns + ------- + The QString API version (1 or 2), or None if not set + """ + try: + import sip + except ImportError: + # as of PyQt5 5.11, sip is no longer available as a top-level + # module and needs to be imported from the PyQt5 namespace + try: + from PyQt5 import sip + except ImportError: + return + try: + return sip.getapi('QString') + except ValueError: + return + + +def can_import(api): + """Safely query whether an API is importable, without importing it""" + if not has_binding(api): + return False + + current = loaded_api() + if api == QT_API_PYQT_DEFAULT: + return current in [QT_API_PYQT6, None] + else: + return current in [api, None] + + +def import_pyqt4(version=2): + """ + Import PyQt4 + + Parameters + ---------- + version : 1, 2, or None + Which QString/QVariant API to use. Set to None to use the system + default + ImportErrors raised within this function are non-recoverable + """ + # The new-style string API (version=2) automatically + # converts QStrings to Unicode Python strings. Also, automatically unpacks + # QVariants to their underlying objects. + import sip + + if version is not None: + sip.setapi('QString', version) + sip.setapi('QVariant', version) + + from PyQt4 import QtGui, QtCore, QtSvg + + if QtCore.PYQT_VERSION < 0x040700: + raise ImportError("IPython requires PyQt4 >= 4.7, found %s" % + QtCore.PYQT_VERSION_STR) + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # query for the API version (in case version == None) + version = sip.getapi('QString') + api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT + return QtCore, QtGui, QtSvg, api + + +def import_pyqt5(): + """ + Import PyQt5 + + ImportErrors raised within this function are non-recoverable + """ + + from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType('QtGuiCompat') + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + + api = QT_API_PYQT5 + return QtCore, QtGuiCompat, QtSvg, api + + +def import_pyqt6(): + """ + Import PyQt6 + + ImportErrors raised within this function are non-recoverable + """ + + from PyQt6 import QtCore, QtSvg, QtWidgets, QtGui + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType("QtGuiCompat") + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + + api = QT_API_PYQT6 + return QtCore, QtGuiCompat, QtSvg, api + + +def import_pyside(): + """ + Import PySide + + ImportErrors raised within this function are non-recoverable + """ + from PySide import QtGui, QtCore, QtSvg + return QtCore, QtGui, QtSvg, QT_API_PYSIDE + +def import_pyside2(): + """ + Import PySide2 + + ImportErrors raised within this function are non-recoverable + """ + from PySide2 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport + + # Join QtGui and QtWidgets for Qt4 compatibility. 
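+    # (QtPrintSupport is merged in as well: Qt4's QtGui included the printing
+    # classes that Qt5 moved into the separate QtPrintSupport module.)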
+ QtGuiCompat = types.ModuleType('QtGuiCompat') + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) + + return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE2 + + +def import_pyside6(): + """ + Import PySide6 + + ImportErrors raised within this function are non-recoverable + """ + from PySide6 import QtGui, QtCore, QtSvg, QtWidgets, QtPrintSupport + + # Join QtGui and QtWidgets for Qt4 compatibility. + QtGuiCompat = types.ModuleType("QtGuiCompat") + QtGuiCompat.__dict__.update(QtGui.__dict__) + QtGuiCompat.__dict__.update(QtWidgets.__dict__) + QtGuiCompat.__dict__.update(QtPrintSupport.__dict__) + + return QtCore, QtGuiCompat, QtSvg, QT_API_PYSIDE6 + + +def load_qt(api_options): + """ + Attempt to import Qt, given a preference list + of permissible bindings + + It is safe to call this function multiple times. + + Parameters + ---------- + api_options : List of strings + The order of APIs to try. Valid items are 'pyside', 'pyside2', + 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault' + + Returns + ------- + A tuple of QtCore, QtGui, QtSvg, QT_API + The first three are the Qt modules. The last is the + string indicating which module was loaded. + + Raises + ------ + ImportError, if it isn't possible to import any requested + bindings (either because they aren't installed, or because + an incompatible library has already been installed) + """ + loaders = { + # Qt6 + QT_API_PYQT6: import_pyqt6, + QT_API_PYSIDE6: import_pyside6, + # Qt5 + QT_API_PYQT5: import_pyqt5, + QT_API_PYSIDE2: import_pyside2, + # Qt4 + QT_API_PYSIDE: import_pyside, + QT_API_PYQT: import_pyqt4, + QT_API_PYQTv1: partial(import_pyqt4, version=1), + # default + QT_API_PYQT_DEFAULT: import_pyqt6, + } + + for api in api_options: + + if api not in loaders: + raise RuntimeError( + "Invalid Qt API %r, valid values are: %s" % + (api, ", ".join(["%r" % k for k in loaders.keys()]))) + + if not can_import(api): + continue + + #cannot safely recover from an ImportError during this + result = loaders[api]() + api = result[-1] # changed if api = QT_API_PYQT_DEFAULT + commit_api(api) + return result + else: + raise ImportError(""" + Could not load requested Qt binding. Please ensure that + PyQt4 >= 4.7, PyQt5, PySide >= 1.0.3 or PySide2 is available, + and only one is imported per session. + + Currently-imported Qt library: %r + PyQt4 available (requires QtCore, QtGui, QtSvg): %s + PyQt5 available (requires QtCore, QtGui, QtSvg, QtWidgets): %s + PySide >= 1.0.3 installed: %s + PySide2 installed: %s + Tried to load: %r + """ % (loaded_api(), + has_binding(QT_API_PYQT), + has_binding(QT_API_PYQT5), + has_binding(QT_API_PYSIDE), + has_binding(QT_API_PYSIDE2), + api_options)) + + +def enum_factory(QT_API, QtCore): + """Construct an enum helper to account for PyQt5 <-> PyQt6 changes.""" + + @lru_cache(None) + def _enum(name): + # foo.bar.Enum.Entry (PyQt6) <=> foo.bar.Entry (non-PyQt6). 
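+        # Callers pass the enum's dotted path and look the entry up on the
+        # result: PyQt6 scopes entries under the Enum class itself, while the
+        # other bindings expose them on the enclosing class, so for those we
+        # resolve the parent by dropping the final path component.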
+ return operator.attrgetter( + name if QT_API == QT_API_PYQT6 else name.rpartition(".")[0] + )(sys.modules[QtCore.__package__]) + + return _enum diff --git a/myenv/lib/python3.10/site-packages/IPython/external/tests/__init__.py b/myenv/lib/python3.10/site-packages/IPython/external/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..93650ead7 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/test_qt_loaders.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/test_qt_loaders.cpython-310.pyc new file mode 100644 index 000000000..dacf58b12 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/external/tests/__pycache__/test_qt_loaders.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/external/tests/test_qt_loaders.py b/myenv/lib/python3.10/site-packages/IPython/external/tests/test_qt_loaders.py new file mode 100644 index 000000000..7bc9ccfa8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/external/tests/test_qt_loaders.py @@ -0,0 +1,11 @@ +import importlib +import pytest +from IPython.external.qt_loaders import ID + + +def test_import_denier(): + ID.forbid("ipython_denied_module") + with pytest.raises(ImportError, match="disabled by IPython"): + import ipython_denied_module + with pytest.raises(ImportError, match="disabled by IPython"): + importlib.import_module("ipython_denied_module") diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__init__.py b/myenv/lib/python3.10/site-packages/IPython/lib/__init__.py new file mode 100644 index 000000000..94b8ade4e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/__init__.py @@ -0,0 +1,11 @@ +# encoding: utf-8 +""" +Extra capabilities for IPython +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..dcb82b737 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/backgroundjobs.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/backgroundjobs.cpython-310.pyc new file mode 100644 index 000000000..00a2644ec Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/backgroundjobs.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/clipboard.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/clipboard.cpython-310.pyc new file mode 100644 index 000000000..ab8e7657a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/clipboard.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/deepreload.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/deepreload.cpython-310.pyc new file mode 100644 index 000000000..4bde79034 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/deepreload.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/demo.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/demo.cpython-310.pyc new file mode 100644 index 000000000..6b663d1e1 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/demo.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/display.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/display.cpython-310.pyc new file mode 100644 index 000000000..b4535eb18 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/display.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/editorhooks.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/editorhooks.cpython-310.pyc new file mode 100644 index 000000000..751333051 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/editorhooks.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/guisupport.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/guisupport.cpython-310.pyc new file mode 100644 index 000000000..5b2721c11 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/guisupport.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/latextools.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/latextools.cpython-310.pyc new file mode 100644 index 000000000..15c5a579e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/latextools.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/lexers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/lexers.cpython-310.pyc new file mode 100644 index 000000000..d52bbbfa1 Binary files /dev/null and 
b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/lexers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/pretty.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/pretty.cpython-310.pyc new file mode 100644 index 000000000..6fc338730 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/__pycache__/pretty.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/backgroundjobs.py b/myenv/lib/python3.10/site-packages/IPython/lib/backgroundjobs.py new file mode 100644 index 000000000..e7ad51eb6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/backgroundjobs.py @@ -0,0 +1,491 @@ +# -*- coding: utf-8 -*- +"""Manage background (threaded) jobs conveniently from an interactive shell. + +This module provides a BackgroundJobManager class. This is the main class +meant for public usage, it implements an object which can create and manage +new background jobs. + +It also provides the actual job classes managed by these BackgroundJobManager +objects, see their docstrings below. + + +This system was inspired by discussions with B. Granger and the +BackgroundCommand class described in the book Python Scripting for +Computational Science, by H. P. Langtangen: + +http://folk.uio.no/hpl/scripting + +(although ultimately no code from this text was used, as IPython's system is a +separate implementation). + +An example notebook is provided in our documentation illustrating interactive +use of the system. +""" + +#***************************************************************************** +# Copyright (C) 2005-2006 Fernando Perez +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +# Code begins +import sys +import threading + +from IPython import get_ipython +from IPython.core.ultratb import AutoFormattedTB +from logging import error, debug + + +class BackgroundJobManager(object): + """Class to manage a pool of backgrounded threaded jobs. + + Below, we assume that 'jobs' is a BackgroundJobManager instance. + + Usage summary (see the method docstrings for details): + + jobs.new(...) -> start a new job + + jobs() or jobs.status() -> print status summary of all jobs + + jobs[N] -> returns job number N. + + foo = jobs[N].result -> assign to variable foo the result of job N + + jobs[N].traceback() -> print the traceback of dead job N + + jobs.remove(N) -> remove (finished) job N + + jobs.flush() -> remove all finished jobs + + As a convenience feature, BackgroundJobManager instances provide the + utility result and traceback methods which retrieve the corresponding + information from the jobs list: + + jobs.result(N) <--> jobs[N].result + jobs.traceback(N) <--> jobs[N].traceback() + + While this appears minor, it allows you to use tab completion + interactively on the job manager instance. 
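+
+    A minimal sketch of a session (``slow_add`` is a stand-in for any
+    long-running function, not part of this module)::
+
+        def slow_add(x, y):
+            import time
+            time.sleep(5)
+            return x + y
+
+        jobs = BackgroundJobManager()
+        jobs.new(slow_add, 2, 3)   # returns immediately, runs in a thread
+        jobs()                     # print a status summary of all jobs
+        jobs[0].result             # 5, once the job has completed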
+ """ + + def __init__(self): + # Lists for job management, accessed via a property to ensure they're + # up to date.x + self._running = [] + self._completed = [] + self._dead = [] + # A dict of all jobs, so users can easily access any of them + self.all = {} + # For reporting + self._comp_report = [] + self._dead_report = [] + # Store status codes locally for fast lookups + self._s_created = BackgroundJobBase.stat_created_c + self._s_running = BackgroundJobBase.stat_running_c + self._s_completed = BackgroundJobBase.stat_completed_c + self._s_dead = BackgroundJobBase.stat_dead_c + self._current_job_id = 0 + + @property + def running(self): + self._update_status() + return self._running + + @property + def dead(self): + self._update_status() + return self._dead + + @property + def completed(self): + self._update_status() + return self._completed + + def new(self, func_or_exp, *args, **kwargs): + """Add a new background job and start it in a separate thread. + + There are two types of jobs which can be created: + + 1. Jobs based on expressions which can be passed to an eval() call. + The expression must be given as a string. For example: + + job_manager.new('myfunc(x,y,z=1)'[,glob[,loc]]) + + The given expression is passed to eval(), along with the optional + global/local dicts provided. If no dicts are given, they are + extracted automatically from the caller's frame. + + A Python statement is NOT a valid eval() expression. Basically, you + can only use as an eval() argument something which can go on the right + of an '=' sign and be assigned to a variable. + + For example,"print 'hello'" is not valid, but '2+3' is. + + 2. Jobs given a function object, optionally passing additional + positional arguments: + + job_manager.new(myfunc, x, y) + + The function is called with the given arguments. + + If you need to pass keyword arguments to your function, you must + supply them as a dict named kw: + + job_manager.new(myfunc, x, y, kw=dict(z=1)) + + The reason for this asymmetry is that the new() method needs to + maintain access to its own keywords, and this prevents name collisions + between arguments to new() and arguments to your own functions. + + In both cases, the result is stored in the job.result field of the + background job object. + + You can set `daemon` attribute of the thread by giving the keyword + argument `daemon`. + + Notes and caveats: + + 1. All threads running share the same standard output. Thus, if your + background jobs generate output, it will come out on top of whatever + you are currently writing. For this reason, background jobs are best + used with silent functions which simply return their output. + + 2. Threads also all work within the same global namespace, and this + system does not lock interactive variables. So if you send job to the + background which operates on a mutable object for a long time, and + start modifying that same mutable object interactively (or in another + backgrounded job), all sorts of bizarre behaviour will occur. + + 3. If a background job is spending a lot of time inside a C extension + module which does not release the Python Global Interpreter Lock + (GIL), this will block the IPython prompt. This is simply because the + Python interpreter can only switch between threads at Python + bytecodes. While the execution is inside C code, the interpreter must + simply wait unless the extension module releases the GIL. + + 4. 
There is no way, due to limitations in the Python threads library, + to kill a thread once it has started.""" + + if callable(func_or_exp): + kw = kwargs.get('kw',{}) + job = BackgroundJobFunc(func_or_exp,*args,**kw) + elif isinstance(func_or_exp, str): + if not args: + frame = sys._getframe(1) + glob, loc = frame.f_globals, frame.f_locals + elif len(args)==1: + glob = loc = args[0] + elif len(args)==2: + glob,loc = args + else: + raise ValueError( + 'Expression jobs take at most 2 args (globals,locals)') + job = BackgroundJobExpr(func_or_exp, glob, loc) + else: + raise TypeError('invalid args for new job') + + if kwargs.get('daemon', False): + job.daemon = True + job.num = self._current_job_id + self._current_job_id += 1 + self.running.append(job) + self.all[job.num] = job + debug('Starting job # %s in a separate thread.' % job.num) + job.start() + return job + + def __getitem__(self, job_key): + num = job_key if isinstance(job_key, int) else job_key.num + return self.all[num] + + def __call__(self): + """An alias to self.status(), + + This allows you to simply call a job manager instance much like the + Unix `jobs` shell command.""" + + return self.status() + + def _update_status(self): + """Update the status of the job lists. + + This method moves finished jobs to one of two lists: + - self.completed: jobs which completed successfully + - self.dead: jobs which finished but died. + + It also copies those jobs to corresponding _report lists. These lists + are used to report jobs completed/dead since the last update, and are + then cleared by the reporting function after each call.""" + + # Status codes + srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead + # State lists, use the actual lists b/c the public names are properties + # that call this very function on access + running, completed, dead = self._running, self._completed, self._dead + + # Now, update all state lists + for num, job in enumerate(running): + stat = job.stat_code + if stat == srun: + continue + elif stat == scomp: + completed.append(job) + self._comp_report.append(job) + running[num] = False + elif stat == sdead: + dead.append(job) + self._dead_report.append(job) + running[num] = False + # Remove dead/completed jobs from running list + running[:] = filter(None, running) + + def _group_report(self,group,name): + """Report summary for a given job group. + + Return True if the group had any elements.""" + + if group: + print('%s jobs:' % name) + for job in group: + print('%s : %s' % (job.num,job)) + print() + return True + + def _group_flush(self,group,name): + """Flush a given job group + + Return True if the group had any elements.""" + + njobs = len(group) + if njobs: + plural = {1:''}.setdefault(njobs,'s') + print('Flushing %s %s job%s.' % (njobs,name,plural)) + group[:] = [] + return True + + def _status_new(self): + """Print the status of newly finished jobs. + + Return True if any new jobs are reported. 
+ + This call resets its own state every time, so it only reports jobs + which have finished since the last time it was called.""" + + self._update_status() + new_comp = self._group_report(self._comp_report, 'Completed') + new_dead = self._group_report(self._dead_report, + 'Dead, call jobs.traceback() for details') + self._comp_report[:] = [] + self._dead_report[:] = [] + return new_comp or new_dead + + def status(self,verbose=0): + """Print a status of all jobs currently being managed.""" + + self._update_status() + self._group_report(self.running,'Running') + self._group_report(self.completed,'Completed') + self._group_report(self.dead,'Dead') + # Also flush the report queues + self._comp_report[:] = [] + self._dead_report[:] = [] + + def remove(self,num): + """Remove a finished (completed or dead) job.""" + + try: + job = self.all[num] + except KeyError: + error('Job #%s not found' % num) + else: + stat_code = job.stat_code + if stat_code == self._s_running: + error('Job #%s is still running, it can not be removed.' % num) + return + elif stat_code == self._s_completed: + self.completed.remove(job) + elif stat_code == self._s_dead: + self.dead.remove(job) + + def flush(self): + """Flush all finished jobs (completed and dead) from lists. + + Running jobs are never flushed. + + It first calls _status_new(), to update info. If any jobs have + completed since the last _status_new() call, the flush operation + aborts.""" + + # Remove the finished jobs from the master dict + alljobs = self.all + for job in self.completed+self.dead: + del(alljobs[job.num]) + + # Now flush these lists completely + fl_comp = self._group_flush(self.completed, 'Completed') + fl_dead = self._group_flush(self.dead, 'Dead') + if not (fl_comp or fl_dead): + print('No jobs to flush.') + + def result(self,num): + """result(N) -> return the result of job N.""" + try: + return self.all[num].result + except KeyError: + error('Job #%s not found' % num) + + def _traceback(self, job): + num = job if isinstance(job, int) else job.num + try: + self.all[num].traceback() + except KeyError: + error('Job #%s not found' % num) + + def traceback(self, job=None): + if job is None: + self._update_status() + for deadjob in self.dead: + print("Traceback for: %r" % deadjob) + self._traceback(deadjob) + print() + else: + self._traceback(job) + + +class BackgroundJobBase(threading.Thread): + """Base class to build BackgroundJob classes. + + The derived classes must implement: + + - Their own __init__, since the one here raises NotImplementedError. The + derived constructor must call self._init() at the end, to provide common + initialization. + + - A strform attribute used in calls to __str__. + + - A call() method, which will make the actual execution call and must + return a value to be held in the 'result' field of the job object. + """ + + # Class constants for status, in string and as numerical codes (when + # updating jobs lists, we don't want to do string comparisons). This will + # be done at every user prompt, so it has to be as fast as possible + stat_created = 'Created'; stat_created_c = 0 + stat_running = 'Running'; stat_running_c = 1 + stat_completed = 'Completed'; stat_completed_c = 2 + stat_dead = 'Dead (Exception), call jobs.traceback() for details' + stat_dead_c = -1 + + def __init__(self): + """Must be implemented in subclasses. + + Subclasses must call :meth:`_init` for standard initialisation. 
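+
+        A minimal sketch of a conforming subclass (illustrative only)::
+
+            class WaitJob(BackgroundJobBase):
+                def __init__(self, seconds):
+                    self.seconds = seconds
+                    self.strform = 'wait %s s' % seconds  # required attribute
+                    self._init()                          # required final call
+
+                def call(self):
+                    import time
+                    time.sleep(self.seconds)
+                    return self.seconds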
+ """ + raise NotImplementedError("This class can not be instantiated directly.") + + def _init(self): + """Common initialization for all BackgroundJob objects""" + + for attr in ['call','strform']: + assert hasattr(self,attr), "Missing attribute <%s>" % attr + + # The num tag can be set by an external job manager + self.num = None + + self.status = BackgroundJobBase.stat_created + self.stat_code = BackgroundJobBase.stat_created_c + self.finished = False + self.result = '' + + # reuse the ipython traceback handler if we can get to it, otherwise + # make a new one + try: + make_tb = get_ipython().InteractiveTB.text + except: + make_tb = AutoFormattedTB(mode = 'Context', + color_scheme='NoColor', + tb_offset = 1).text + # Note that the actual API for text() requires the three args to be + # passed in, so we wrap it in a simple lambda. + self._make_tb = lambda : make_tb(None, None, None) + + # Hold a formatted traceback if one is generated. + self._tb = None + + threading.Thread.__init__(self) + + def __str__(self): + return self.strform + + def __repr__(self): + return '' % (self.num, self.strform) + + def traceback(self): + print(self._tb) + + def run(self): + try: + self.status = BackgroundJobBase.stat_running + self.stat_code = BackgroundJobBase.stat_running_c + self.result = self.call() + except: + self.status = BackgroundJobBase.stat_dead + self.stat_code = BackgroundJobBase.stat_dead_c + self.finished = None + self.result = ('') + self._tb = self._make_tb() + else: + self.status = BackgroundJobBase.stat_completed + self.stat_code = BackgroundJobBase.stat_completed_c + self.finished = True + + +class BackgroundJobExpr(BackgroundJobBase): + """Evaluate an expression as a background job (uses a separate thread).""" + + def __init__(self, expression, glob=None, loc=None): + """Create a new job from a string which can be fed to eval(). + + global/locals dicts can be provided, which will be passed to the eval + call.""" + + # fail immediately if the given expression can't be compiled + self.code = compile(expression,'','eval') + + glob = {} if glob is None else glob + loc = {} if loc is None else loc + self.expression = self.strform = expression + self.glob = glob + self.loc = loc + self._init() + + def call(self): + return eval(self.code,self.glob,self.loc) + + +class BackgroundJobFunc(BackgroundJobBase): + """Run a function call as a background job (uses a separate thread).""" + + def __init__(self, func, *args, **kwargs): + """Create a new job from a callable object. + + Any positional arguments and keyword args given to this constructor + after the initial callable are passed directly to it.""" + + if not callable(func): + raise TypeError( + 'first argument to BackgroundJobFunc must be callable') + + self.func = func + self.args = args + self.kwargs = kwargs + # The string form will only include the function passed, because + # generating string representations of the arguments is a potentially + # _very_ expensive operation (e.g. with large arrays). + self.strform = str(func) + self._init() + + def call(self): + return self.func(*self.args, **self.kwargs) diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/clipboard.py b/myenv/lib/python3.10/site-packages/IPython/lib/clipboard.py new file mode 100644 index 000000000..1d691a7ea --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/clipboard.py @@ -0,0 +1,101 @@ +""" Utilities for accessing the platform's clipboard. 
+""" +import os +import subprocess + +from IPython.core.error import TryNext +import IPython.utils.py3compat as py3compat + + +class ClipboardEmpty(ValueError): + pass + + +def win32_clipboard_get(): + """ Get the current clipboard's text on Windows. + + Requires Mark Hammond's pywin32 extensions. + """ + try: + import win32clipboard + except ImportError as e: + raise TryNext("Getting text from the clipboard requires the pywin32 " + "extensions: http://sourceforge.net/projects/pywin32/") from e + win32clipboard.OpenClipboard() + try: + text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) + except (TypeError, win32clipboard.error): + try: + text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT) + text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) + except (TypeError, win32clipboard.error) as e: + raise ClipboardEmpty from e + finally: + win32clipboard.CloseClipboard() + return text + + +def osx_clipboard_get() -> str: + """ Get the clipboard's text on OS X. + """ + p = subprocess.Popen(['pbpaste', '-Prefer', 'ascii'], + stdout=subprocess.PIPE) + bytes_, stderr = p.communicate() + # Text comes in with old Mac \r line endings. Change them to \n. + bytes_ = bytes_.replace(b'\r', b'\n') + text = py3compat.decode(bytes_) + return text + + +def tkinter_clipboard_get(): + """ Get the clipboard's text using Tkinter. + + This is the default on systems that are not Windows or OS X. It may + interfere with other UI toolkits and should be replaced with an + implementation that uses that toolkit. + """ + try: + from tkinter import Tk, TclError + except ImportError as e: + raise TryNext("Getting text from the clipboard on this platform requires tkinter.") from e + + root = Tk() + root.withdraw() + try: + text = root.clipboard_get() + except TclError as e: + raise ClipboardEmpty from e + finally: + root.destroy() + text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) + return text + + +def wayland_clipboard_get(): + """Get the clipboard's text under Wayland using wl-paste command. + + This requires Wayland and wl-clipboard installed and running. + """ + if os.environ.get("XDG_SESSION_TYPE") != "wayland": + raise TryNext("wayland is not detected") + + try: + with subprocess.Popen(["wl-paste"], stdout=subprocess.PIPE) as p: + raw, err = p.communicate() + if p.wait(): + raise TryNext(err) + except FileNotFoundError as e: + raise TryNext( + "Getting text from the clipboard under Wayland requires the wl-clipboard " + "extension: https://github.com/bugaevc/wl-clipboard" + ) from e + + if not raw: + raise ClipboardEmpty + + try: + text = py3compat.decode(raw) + except UnicodeDecodeError as e: + raise ClipboardEmpty from e + + return text diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/deepreload.py b/myenv/lib/python3.10/site-packages/IPython/lib/deepreload.py new file mode 100644 index 000000000..aaedab242 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/deepreload.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +""" +Provides a reload() function that acts recursively. + +Python's normal :func:`python:reload` function only reloads the module that it's +passed. The :func:`reload` function in this module also reloads everything +imported from that module, which is useful when you're changing files deep +inside a package. 
+ +To use this as your default reload function, type this:: + + import builtins + from IPython.lib import deepreload + builtins.reload = deepreload.reload + +A reference to the original :func:`python:reload` is stored in this module as +:data:`original_reload`, so you can restore it later. + +This code is almost entirely based on knee.py, which is a Python +re-implementation of hierarchical module import. +""" +#***************************************************************************** +# Copyright (C) 2001 Nathaniel Gray +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#***************************************************************************** + +import builtins as builtin_mod +from contextlib import contextmanager +import importlib +import sys + +from types import ModuleType +from warnings import warn +import types + +original_import = builtin_mod.__import__ + +@contextmanager +def replace_import_hook(new_import): + saved_import = builtin_mod.__import__ + builtin_mod.__import__ = new_import + try: + yield + finally: + builtin_mod.__import__ = saved_import + +def get_parent(globals, level): + """ + parent, name = get_parent(globals, level) + + Return the package that an import is being performed in. If globals comes + from the module foo.bar.bat (not itself a package), this returns the + sys.modules entry for foo.bar. If globals is from a package's __init__.py, + the package's entry in sys.modules is returned. + + If globals doesn't come from a package or a module in a package, or a + corresponding entry is not found in sys.modules, None is returned. + """ + orig_level = level + + if not level or not isinstance(globals, dict): + return None, '' + + pkgname = globals.get('__package__', None) + + if pkgname is not None: + # __package__ is set, so use it + if not hasattr(pkgname, 'rindex'): + raise ValueError('__package__ set to non-string') + if len(pkgname) == 0: + if level > 0: + raise ValueError('Attempted relative import in non-package') + return None, '' + name = pkgname + else: + # __package__ not set, so figure it out and set it + if '__name__' not in globals: + return None, '' + modname = globals['__name__'] + + if '__path__' in globals: + # __path__ is set, so modname is already the package name + globals['__package__'] = name = modname + else: + # Normal module, so work out the package name if any + lastdot = modname.rfind('.') + if lastdot < 0 < level: + raise ValueError("Attempted relative import in non-package") + if lastdot < 0: + globals['__package__'] = None + return None, '' + globals['__package__'] = name = modname[:lastdot] + + dot = len(name) + for x in range(level, 1, -1): + try: + dot = name.rindex('.', 0, dot) + except ValueError as e: + raise ValueError("attempted relative import beyond top-level " + "package") from e + name = name[:dot] + + try: + parent = sys.modules[name] + except BaseException as e: + if orig_level < 1: + warn("Parent module '%.200s' not found while handling absolute " + "import" % name) + parent = None + else: + raise SystemError("Parent module '%.200s' not loaded, cannot " + "perform relative import" % name) from e + + # We expect, but can't guarantee, if parent != None, that: + # - parent.__name__ == name + # - parent.__dict__ is globals + # If this is violated... Who cares? 
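+    # For example: with globals taken from module foo.bar.bat and level=1,
+    # this returns (sys.modules['foo.bar'], 'foo.bar'); with level=2 it walks
+    # one package further up and returns (sys.modules['foo'], 'foo').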
+ return parent, name + +def load_next(mod, altmod, name, buf): + """ + mod, name, buf = load_next(mod, altmod, name, buf) + + altmod is either None or same as mod + """ + + if len(name) == 0: + # completely empty module name should only happen in + # 'from . import' (or '__import__("")') + return mod, None, buf + + dot = name.find('.') + if dot == 0: + raise ValueError('Empty module name') + + if dot < 0: + subname = name + next = None + else: + subname = name[:dot] + next = name[dot+1:] + + if buf != '': + buf += '.' + buf += subname + + result = import_submodule(mod, subname, buf) + if result is None and mod != altmod: + result = import_submodule(altmod, subname, subname) + if result is not None: + buf = subname + + if result is None: + raise ImportError("No module named %.200s" % name) + + return result, next, buf + + +# Need to keep track of what we've already reloaded to prevent cyclic evil +found_now = {} + +def import_submodule(mod, subname, fullname): + """m = import_submodule(mod, subname, fullname)""" + # Require: + # if mod == None: subname == fullname + # else: mod.__name__ + "." + subname == fullname + + global found_now + if fullname in found_now and fullname in sys.modules: + m = sys.modules[fullname] + else: + print('Reloading', fullname) + found_now[fullname] = 1 + oldm = sys.modules.get(fullname, None) + try: + if oldm is not None: + m = importlib.reload(oldm) + else: + m = importlib.import_module(subname, mod) + except: + # load_module probably removed name from modules because of + # the error. Put back the original module object. + if oldm: + sys.modules[fullname] = oldm + raise + + add_submodule(mod, m, fullname, subname) + + return m + +def add_submodule(mod, submod, fullname, subname): + """mod.{subname} = submod""" + if mod is None: + return #Nothing to do here. + + if submod is None: + submod = sys.modules[fullname] + + setattr(mod, subname, submod) + + return + +def ensure_fromlist(mod, fromlist, buf, recursive): + """Handle 'from module import a, b, c' imports.""" + if not hasattr(mod, '__path__'): + return + for item in fromlist: + if not hasattr(item, 'rindex'): + raise TypeError("Item in ``from list'' not a string") + if item == '*': + if recursive: + continue # avoid endless recursion + try: + all = mod.__all__ + except AttributeError: + pass + else: + ret = ensure_fromlist(mod, all, buf, 1) + if not ret: + return 0 + elif not hasattr(mod, item): + import_submodule(mod, item, buf + '.' + item) + +def deep_import_hook(name, globals=None, locals=None, fromlist=None, level=-1): + """Replacement for __import__()""" + parent, buf = get_parent(globals, level) + + head, name, buf = load_next(parent, None if level < 0 else parent, name, buf) + + tail = head + while name: + tail, name, buf = load_next(tail, tail, name, buf) + + # If tail is None, both get_parent and load_next found + # an empty module name: someone called __import__("") or + # doctored faulty bytecode + if tail is None: + raise ValueError('Empty module name') + + if not fromlist: + return head + + ensure_fromlist(tail, fromlist, buf, 0) + return tail + +modules_reloading = {} + +def deep_reload_hook(m): + """Replacement for reload().""" + # Hardcode this one as it would raise a NotImplementedError from the + # bowels of Python and screw up the import machinery after. + # unlike other imports the `exclude` list already in place is not enough. 
+ + if m is types: + return m + if not isinstance(m, ModuleType): + raise TypeError("reload() argument must be module") + + name = m.__name__ + + if name not in sys.modules: + raise ImportError("reload(): module %.200s not in sys.modules" % name) + + global modules_reloading + try: + return modules_reloading[name] + except: + modules_reloading[name] = m + + try: + newm = importlib.reload(m) + except: + sys.modules[name] = m + raise + finally: + modules_reloading.clear() + return newm + +# Save the original hooks +original_reload = importlib.reload + +# Replacement for reload() +def reload( + module, + exclude=( + *sys.builtin_module_names, + "sys", + "os.path", + "builtins", + "__main__", + "numpy", + "numpy._globals", + ), +): + """Recursively reload all modules used in the given module. Optionally + takes a list of modules to exclude from reloading. The default exclude + list contains modules listed in sys.builtin_module_names with additional + sys, os.path, builtins and __main__, to prevent, e.g., resetting + display, exception, and io hooks. + """ + global found_now + for i in exclude: + found_now[i] = 1 + try: + with replace_import_hook(deep_import_hook): + return deep_reload_hook(module) + finally: + found_now = {} diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/demo.py b/myenv/lib/python3.10/site-packages/IPython/lib/demo.py new file mode 100644 index 000000000..ebffd54ab --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/demo.py @@ -0,0 +1,672 @@ +"""Module for interactive demos using IPython. + +This module implements a few classes for running Python scripts interactively +in IPython for demonstrations. With very simple markup (a few tags in +comments), you can control points where the script stops executing and returns +control to IPython. + + +Provided classes +---------------- + +The classes are (see their docstrings for further details): + + - Demo: pure python demos + + - IPythonDemo: demos with input to be processed by IPython as if it had been + typed interactively (so magics work, as well as any other special syntax you + may have added via input prefilters). + + - LineDemo: single-line version of the Demo class. These demos are executed + one line at a time, and require no markup. + + - IPythonLineDemo: IPython version of the LineDemo class (the demo is + executed a line at a time, but processed via IPython). + + - ClearMixin: mixin to make Demo classes with less visual clutter. It + declares an empty marquee and a pre_cmd that clears the screen before each + block (see Subclassing below). + + - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo + classes. + +Inheritance diagram: + +.. inheritance-diagram:: IPython.lib.demo + :parts: 3 + +Subclassing +----------- + +The classes here all include a few methods meant to make customization by +subclassing more convenient. Their docstrings below have some more details: + + - highlight(): format every block and optionally highlight comments and + docstring content. + + - marquee(): generates a marquee to provide visible on-screen markers at each + block start and end. + + - pre_cmd(): run right before the execution of each block. + + - post_cmd(): run right after the execution of each block. If the block + raises an exception, this is NOT called. + + +Operation +--------- + +The file is run in its own empty namespace (though you can pass it a string of +arguments as if in a command line environment, and it will see those as +sys.argv). 
But at each stop, the global IPython namespace is updated with the
+current internal demo namespace, so you can work interactively with the data
+accumulated so far.
+
+By default, each block of code is printed (with syntax highlighting) before
+executing it and you have to confirm execution. This is intended to show the
+code to an audience first so you can discuss it, and only proceed with
+execution once you agree. There are a few tags which allow you to modify this
+behavior.
+
+The supported tags are:
+
+# <demo> stop
+
+  Defines block boundaries, the points where IPython stops execution of the
+  file and returns to the interactive prompt.
+
+  You can optionally mark the stop tag with extra dashes before and after the
+  word 'stop', to help visually distinguish the blocks in a text editor:
+
+  # --- <demo> stop ---
+
+
+# <demo> silent
+
+  Make a block execute silently (and hence automatically). Typically used in
+  cases where you have some boilerplate or initialization code which you need
+  executed but do not want to be seen in the demo.
+
+# <demo> auto
+
+  Make a block execute automatically, while still printing it. Useful for
+  simple code which does not warrant discussion, since it avoids the extra
+  manual confirmation.
+
+# <demo> auto_all
+
+  This tag can _only_ be in the first block, and if given it overrides the
+  individual auto tags to make the whole demo fully automatic (no block asks
+  for confirmation). It can also be given at creation time (or the attribute
+  set later) to override what's in the file.
+
+While _any_ python file can be run as a Demo instance, if there are no stop
+tags the whole file will run in a single block (no different than calling
+first %pycat and then %run). The minimal markup to make this useful is to
+place a set of stop tags; the other tags are only there to let you fine-tune
+the execution.
+
+This is probably best explained with the simple example file below. You can
+copy this into a file named ex_demo.py, and try running it via::
+
+    from IPython.lib.demo import Demo
+    d = Demo('ex_demo.py')
+    d()
+
+Each time you call the demo object, it runs the next block. The demo object
+has a few useful methods for navigation, like again(), edit(), jump(), seek()
+and back(). It can be reset for a new run via reset() or reloaded from disk
+(in case you've edited the source) via reload(). See their docstrings below.
+
+Note: To make this simpler to explore, a file called "demo-exercizer.py" has
+been added to the "docs/examples/core" directory. Just cd to this directory in
+an IPython session, and type::
+
+    %run demo-exercizer.py
+
+and then follow the directions.
+
+Example
+-------
+
+The following is a very simple example of a valid demo file.
+
+::
+
+    #################### EXAMPLE DEMO <ex_demo.py> ###############################
+    '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
+
+    print('Hello, welcome to an interactive IPython demo.')
+
+    # The mark below defines a block boundary, which is a point where IPython will
+    # stop execution and return to the interactive prompt. The dashes are actually
+    # optional and used only as a visual aid to clearly separate blocks while
+    # editing the demo code.
+    # <demo> stop
+
+    x = 1
+    y = 2
+
+    # <demo> stop
+
+    # the mark below makes this block silent
+    # <demo> silent
+
+    print('This is a silent block, which gets executed but not printed.')
+
+    # <demo> stop
+    # <demo> auto
+    print('This is an automatic block.')
+    print('It is executed without asking for confirmation, but printed.')
+    z = x + y
+
+    print('z =', z)
+
+    # <demo> stop
+    # This is just another normal block.
+    print('z is now:', z)
+
+    print('bye!')
+    ################### END EXAMPLE DEMO <ex_demo.py> ############################
+"""
+
+
+#*****************************************************************************
+# Copyright (C) 2005-2006 Fernando Perez.
+#
+# Distributed under the terms of the BSD License. The full license is in
+# the file COPYING, distributed as part of this software.
+#
+#*****************************************************************************
+
+import os
+import re
+import shlex
+import sys
+import pygments
+from pathlib import Path
+
+from IPython.utils.text import marquee
+from IPython.utils import openpy
+from IPython.utils import py3compat
+
+__all__ = ['Demo', 'IPythonDemo', 'LineDemo', 'IPythonLineDemo', 'DemoError']
+
+
+class DemoError(Exception): pass
+
+
+def re_mark(mark):
+    return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark, re.MULTILINE)
+
+
+class Demo(object):
+
+    re_stop = re_mark(r'-*\s?stop\s?-*')
+    re_silent = re_mark('silent')
+    re_auto = re_mark('auto')
+    re_auto_all = re_mark('auto_all')
+
+    def __init__(self, src, title='', arg_str='', auto_all=None, format_rst=False,
+                 formatter='terminal', style='default'):
+        """Make a new demo object. To run the demo, simply call the object.
+
+        See the module docstring for full details and an example (you can use
+        IPython.Demo? in IPython to see it).
+
+        Inputs:
+
+        - src is either a file, or file-like object, or a
+          string that can be resolved to a filename.
+
+        Optional inputs:
+
+        - title: a string to use as the demo name. Of most use when the demo
+          you are making comes from an object that has no filename, or if you
+          want an alternate denotation distinct from the filename.
+
+        - arg_str(''): a string of arguments, internally converted to a list
+          just like sys.argv, so the demo script can see a similar
+          environment.
+
+        - auto_all(None): global flag to run all blocks automatically without
+          confirmation. This attribute overrides the block-level tags and
+          applies to the whole demo. It is an attribute of the object, and
+          can be changed at runtime simply by reassigning it to a boolean
+          value.
+
+        - format_rst(False): a bool to enable comments and doc strings
+          formatting with pygments rst lexer
+
+        - formatter('terminal'): a string of pygments formatter name to be
+          used. Useful values for terminals: terminal, terminal256,
+          terminal16m
+
+        - style('default'): a string of pygments style name to be used.
+        """
+        if hasattr(src, "read"):
+            # It seems to be a file or a file-like object
+            self.fname = "from a file-like object"
+            if title == '':
+                self.title = "from a file-like object"
+            else:
+                self.title = title
+        else:
+            # Assume it's a string or something that can be converted to one
+            self.fname = src
+            if title == '':
+                (filepath, filename) = os.path.split(src)
+                self.title = filename
+            else:
+                self.title = title
+        self.sys_argv = [src] + shlex.split(arg_str)
+        self.auto_all = auto_all
+        self.src = src
+
+        try:
+            ip = get_ipython()  # this is in builtins whenever IPython is running
+            self.inside_ipython = True
+        except NameError:
+            self.inside_ipython = False
+
+        if self.inside_ipython:
+            # get a few things from ipython.
While it's a bit ugly design-wise, + # it ensures that things like color scheme and the like are always in + # sync with the ipython mode being used. This class is only meant to + # be used inside ipython anyways, so it's OK. + self.ip_ns = ip.user_ns + self.ip_colorize = ip.pycolorize + self.ip_showtb = ip.showtraceback + self.ip_run_cell = ip.run_cell + self.shell = ip + + self.formatter = pygments.formatters.get_formatter_by_name(formatter, + style=style) + self.python_lexer = pygments.lexers.get_lexer_by_name("py3") + self.format_rst = format_rst + if format_rst: + self.rst_lexer = pygments.lexers.get_lexer_by_name("rst") + + # load user data and initialize data structures + self.reload() + + def fload(self): + """Load file object.""" + # read data and parse into blocks + if hasattr(self, 'fobj') and self.fobj is not None: + self.fobj.close() + if hasattr(self.src, "read"): + # It seems to be a file or a file-like object + self.fobj = self.src + else: + # Assume it's a string or something that can be converted to one + self.fobj = openpy.open(self.fname) + + def reload(self): + """Reload source from disk and initialize state.""" + self.fload() + + self.src = "".join(openpy.strip_encoding_cookie(self.fobj)) + src_b = [b.strip() for b in self.re_stop.split(self.src) if b] + self._silent = [bool(self.re_silent.findall(b)) for b in src_b] + self._auto = [bool(self.re_auto.findall(b)) for b in src_b] + + # if auto_all is not given (def. None), we read it from the file + if self.auto_all is None: + self.auto_all = bool(self.re_auto_all.findall(src_b[0])) + else: + self.auto_all = bool(self.auto_all) + + # Clean the sources from all markup so it doesn't get displayed when + # running the demo + src_blocks = [] + auto_strip = lambda s: self.re_auto.sub('',s) + for i,b in enumerate(src_b): + if self._auto[i]: + src_blocks.append(auto_strip(b)) + else: + src_blocks.append(b) + # remove the auto_all marker + src_blocks[0] = self.re_auto_all.sub('',src_blocks[0]) + + self.nblocks = len(src_blocks) + self.src_blocks = src_blocks + + # also build syntax-highlighted source + self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) + + # ensure clean namespace and seek offset + self.reset() + + def reset(self): + """Reset the namespace and seek pointer to restart the demo""" + self.user_ns = {} + self.finished = False + self.block_index = 0 + + def _validate_index(self,index): + if index<0 or index>=self.nblocks: + raise ValueError('invalid block index %s' % index) + + def _get_index(self,index): + """Get the current block index, validating and checking status. + + Returns None if the demo is finished""" + + if index is None: + if self.finished: + print('Demo finished. Use .reset() if you want to rerun it.') + return None + index = self.block_index + else: + self._validate_index(index) + return index + + def seek(self,index): + """Move the current seek pointer to the given block. + + You can use negative indices to seek from the end, with identical + semantics to those of Python lists.""" + if index<0: + index = self.nblocks + index + self._validate_index(index) + self.block_index = index + self.finished = False + + def back(self,num=1): + """Move the seek pointer back num blocks (default is 1).""" + self.seek(self.block_index-num) + + def jump(self,num=1): + """Jump a given number of blocks relative to the current one. 
+ + The offset can be positive or negative, defaults to 1.""" + self.seek(self.block_index+num) + + def again(self): + """Move the seek pointer back one block and re-execute.""" + self.back(1) + self() + + def edit(self,index=None): + """Edit a block. + + If no number is given, use the last block executed. + + This edits the in-memory copy of the demo, it does NOT modify the + original source file. If you want to do that, simply open the file in + an editor and use reload() when you make changes to the file. This + method is meant to let you change a block during a demonstration for + explanatory purposes, without damaging your original script.""" + + index = self._get_index(index) + if index is None: + return + # decrease the index by one (unless we're at the very beginning), so + # that the default demo.edit() call opens up the sblock we've last run + if index>0: + index -= 1 + + filename = self.shell.mktempfile(self.src_blocks[index]) + self.shell.hooks.editor(filename, 1) + with open(Path(filename), "r", encoding="utf-8") as f: + new_block = f.read() + # update the source and colored block + self.src_blocks[index] = new_block + self.src_blocks_colored[index] = self.highlight(new_block) + self.block_index = index + # call to run with the newly edited index + self() + + def show(self,index=None): + """Show a single block on screen""" + + index = self._get_index(index) + if index is None: + return + + print(self.marquee('<%s> block # %s (%s remaining)' % + (self.title,index,self.nblocks-index-1))) + print(self.src_blocks_colored[index]) + sys.stdout.flush() + + def show_all(self): + """Show entire demo on screen, block by block""" + + fname = self.title + title = self.title + nblocks = self.nblocks + silent = self._silent + marquee = self.marquee + for index,block in enumerate(self.src_blocks_colored): + if silent[index]: + print(marquee('<%s> SILENT block # %s (%s remaining)' % + (title,index,nblocks-index-1))) + else: + print(marquee('<%s> block # %s (%s remaining)' % + (title,index,nblocks-index-1))) + print(block, end=' ') + sys.stdout.flush() + + def run_cell(self,source): + """Execute a string with one or more lines of code""" + + exec(source, self.user_ns) + + def __call__(self,index=None): + """run a block of the demo. + + If index is given, it should be an integer >=1 and <= nblocks. This + means that the calling convention is one off from typical Python + lists. 
The reason for the inconsistency is that the demo always
+        prints 'Block n/N', and N is the total, so it would be very odd to use
+        zero-indexing here."""
+
+        index = self._get_index(index)
+        if index is None:
+            return
+        try:
+            marquee = self.marquee
+            next_block = self.src_blocks[index]
+            self.block_index += 1
+            if self._silent[index]:
+                print(marquee('Executing silent block # %s (%s remaining)' %
+                              (index, self.nblocks-index-1)))
+            else:
+                self.pre_cmd()
+                self.show(index)
+                if self.auto_all or self._auto[index]:
+                    print(marquee('output:'))
+                else:
+                    print(marquee('Press <q> to quit, <Enter> to execute...'), end=' ')
+                    ans = py3compat.input().strip()
+                    if ans:
+                        print(marquee('Block NOT executed'))
+                        return
+            try:
+                save_argv = sys.argv
+                sys.argv = self.sys_argv
+                self.run_cell(next_block)
+                self.post_cmd()
+            finally:
+                sys.argv = save_argv
+
+        except:
+            if self.inside_ipython:
+                self.ip_showtb(filename=self.fname)
+        else:
+            if self.inside_ipython:
+                self.ip_ns.update(self.user_ns)
+
+        if self.block_index == self.nblocks:
+            mq1 = self.marquee('END OF DEMO')
+            if mq1:
+                # avoid spurious print if empty marquees are used
+                print()
+                print(mq1)
+                print(self.marquee('Use <demo_name>.reset() if you want to rerun it.'))
+            self.finished = True
+
+    # These methods are meant to be overridden by subclasses who may wish to
+    # customize the behavior of their demos.
+    def marquee(self, txt='', width=78, mark='*'):
+        """Return the input string centered in a 'marquee'."""
+        return marquee(txt, width, mark)
+
+    def pre_cmd(self):
+        """Method called before executing each block."""
+        pass
+
+    def post_cmd(self):
+        """Method called after executing each block."""
+        pass
+
+    def highlight(self, block):
+        """Method called on each block to highlight its content"""
+        tokens = pygments.lex(block, self.python_lexer)
+        if self.format_rst:
+            from pygments.token import Token
+            toks = []
+            for token in tokens:
+                if token[0] == Token.String.Doc and len(token[1]) > 6:
+                    toks += pygments.lex(token[1][:3], self.python_lexer)
+                    # parse doc string content by rst lexer
+                    toks += pygments.lex(token[1][3:-3], self.rst_lexer)
+                    toks += pygments.lex(token[1][-3:], self.python_lexer)
+                elif token[0] == Token.Comment.Single:
+                    toks.append((Token.Comment.Single, token[1][0]))
+                    # parse comment content by rst lexer
+                    # remove the extra newline added by rst lexer
+                    toks += list(pygments.lex(token[1][1:], self.rst_lexer))[:-1]
+                else:
+                    toks.append(token)
+            tokens = toks
+        return pygments.format(tokens, self.formatter)
+
+
+class IPythonDemo(Demo):
+    """Class for interactive demos with IPython's input processing applied.
+
+    This subclasses Demo, but instead of executing each block by the Python
+    interpreter (via exec), it actually calls IPython on it, so that any input
+    filters which may be in place are applied to the input block.
+
+    If you have an interactive environment which exposes special input
+    processing, you can use this class instead to write demo scripts which
+    operate exactly as if you had typed them interactively. The default Demo
+    class requires the input to be valid, pure Python code.
+    """
+
+    def run_cell(self, source):
+        """Execute a string with one or more lines of code"""
+
+        self.shell.run_cell(source)
+
+
+class LineDemo(Demo):
+    """Demo where each line is executed as a separate block.
+
+    The input script should be valid Python code.
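+
+    A hypothetical example, assuming a file ``lines.py`` that contains only
+    unindented single-line statements::
+
+        from IPython.lib.demo import LineDemo
+        d = LineDemo('lines.py')
+        d()  # executes the next single-line block; call again to continue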
+ + This class doesn't require any markup at all, and it's meant for simple + scripts (with no nesting or any kind of indentation) which consist of + multiple lines of input to be executed, one at a time, as if they had been + typed in the interactive prompt. + + Note: the input can not have *any* indentation, which means that only + single-lines of input are accepted, not even function definitions are + valid.""" + + def reload(self): + """Reload source from disk and initialize state.""" + # read data and parse into blocks + self.fload() + lines = self.fobj.readlines() + src_b = [l for l in lines if l.strip()] + nblocks = len(src_b) + self.src = ''.join(lines) + self._silent = [False]*nblocks + self._auto = [True]*nblocks + self.auto_all = True + self.nblocks = nblocks + self.src_blocks = src_b + + # also build syntax-highlighted source + self.src_blocks_colored = list(map(self.highlight,self.src_blocks)) + + # ensure clean namespace and seek offset + self.reset() + + +class IPythonLineDemo(IPythonDemo,LineDemo): + """Variant of the LineDemo class whose input is processed by IPython.""" + pass + + +class ClearMixin(object): + """Use this mixin to make Demo classes with less visual clutter. + + Demos using this mixin will clear the screen before every block and use + blank marquees. + + Note that in order for the methods defined here to actually override those + of the classes it's mixed with, it must go /first/ in the inheritance + tree. For example: + + class ClearIPDemo(ClearMixin,IPythonDemo): pass + + will provide an IPythonDemo class with the mixin's features. + """ + + def marquee(self,txt='',width=78,mark='*'): + """Blank marquee that returns '' no matter what the input.""" + return '' + + def pre_cmd(self): + """Method called before executing each block. 
+ + This one simply clears the screen.""" + from IPython.utils.terminal import _term_clear + _term_clear() + +class ClearDemo(ClearMixin,Demo): + pass + + +class ClearIPDemo(ClearMixin,IPythonDemo): + pass + + +def slide(file_path, noclear=False, format_rst=True, formatter="terminal", + style="native", auto_all=False, delimiter='...'): + if noclear: + demo_class = Demo + else: + demo_class = ClearDemo + demo = demo_class(file_path, format_rst=format_rst, formatter=formatter, + style=style, auto_all=auto_all) + while not demo.finished: + demo() + try: + py3compat.input('\n' + delimiter) + except KeyboardInterrupt: + exit(1) + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser(description='Run python demos') + parser.add_argument('--noclear', '-C', action='store_true', + help='Do not clear terminal on each slide') + parser.add_argument('--rst', '-r', action='store_true', + help='Highlight comments and dostrings as rst') + parser.add_argument('--formatter', '-f', default='terminal', + help='pygments formatter name could be: terminal, ' + 'terminal256, terminal16m') + parser.add_argument('--style', '-s', default='default', + help='pygments style name') + parser.add_argument('--auto', '-a', action='store_true', + help='Run all blocks automatically without' + 'confirmation') + parser.add_argument('--delimiter', '-d', default='...', + help='slides delimiter added after each slide run') + parser.add_argument('file', nargs=1, + help='python demo file') + args = parser.parse_args() + slide(args.file[0], noclear=args.noclear, format_rst=args.rst, + formatter=args.formatter, style=args.style, auto_all=args.auto, + delimiter=args.delimiter) diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/display.py b/myenv/lib/python3.10/site-packages/IPython/lib/display.py new file mode 100644 index 000000000..5ff2983db --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/display.py @@ -0,0 +1,675 @@ +"""Various display related classes. + +Authors : MinRK, gregcaporaso, dannystaple +""" +from html import escape as html_escape +from os.path import exists, isfile, splitext, abspath, join, isdir +from os import walk, sep, fsdecode + +from IPython.core.display import DisplayObject, TextDisplayObject + +from typing import Tuple, Iterable + +__all__ = ['Audio', 'IFrame', 'YouTubeVideo', 'VimeoVideo', 'ScribdDocument', + 'FileLink', 'FileLinks', 'Code'] + + +class Audio(DisplayObject): + """Create an audio object. + + When this object is returned by an input cell or passed to the + display function, it will result in Audio controls being displayed + in the frontend (only works in the notebook). + + Parameters + ---------- + data : numpy array, list, unicode, str or bytes + Can be one of + + * Numpy 1d array containing the desired waveform (mono) + * Numpy 2d array containing waveforms for each channel. + Shape=(NCHAN, NSAMPLES). For the standard channel order, see + http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx + * List of float or integer representing the waveform (mono) + * String containing the filename + * Bytestring containing raw PCM data or + * URL pointing to a file on the web. + + If the array option is used, the waveform will be normalized. + + If a filename or url is used, the format support will be browser + dependent. + url : unicode + A URL to download the data from. + filename : unicode + Path to a local file to load the data from. 
+ embed : boolean + Should the audio data be embedded using a data URI (True) or should + the original source be referenced. Set this to True if you want the + audio to playable later with no internet connection in the notebook. + + Default is `True`, unless the keyword argument `url` is set, then + default value is `False`. + rate : integer + The sampling rate of the raw data. + Only required when data parameter is being used as an array + autoplay : bool + Set to True if the audio should immediately start playing. + Default is `False`. + normalize : bool + Whether audio should be normalized (rescaled) to the maximum possible + range. Default is `True`. When set to `False`, `data` must be between + -1 and 1 (inclusive), otherwise an error is raised. + Applies only when `data` is a list or array of samples; other types of + audio are never normalized. + + Examples + -------- + + >>> import pytest + >>> np = pytest.importorskip("numpy") + + Generate a sound + + >>> import numpy as np + >>> framerate = 44100 + >>> t = np.linspace(0,5,framerate*5) + >>> data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t) + >>> Audio(data, rate=framerate) + + + Can also do stereo or more channels + + >>> dataleft = np.sin(2*np.pi*220*t) + >>> dataright = np.sin(2*np.pi*224*t) + >>> Audio([dataleft, dataright], rate=framerate) + + + From URL: + + >>> Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # doctest: +SKIP + >>> Audio(url="http://www.w3schools.com/html/horse.ogg") # doctest: +SKIP + + From a File: + + >>> Audio('IPython/lib/tests/test.wav') # doctest: +SKIP + >>> Audio(filename='IPython/lib/tests/test.wav') # doctest: +SKIP + + From Bytes: + + >>> Audio(b'RAW_WAV_DATA..') # doctest: +SKIP + >>> Audio(data=b'RAW_WAV_DATA..') # doctest: +SKIP + + See Also + -------- + ipywidgets.Audio + + Audio widget with more more flexibility and options. + + """ + _read_flags = 'rb' + + def __init__(self, data=None, filename=None, url=None, embed=None, rate=None, autoplay=False, normalize=True, *, + element_id=None): + if filename is None and url is None and data is None: + raise ValueError("No audio data found. Expecting filename, url, or data.") + if embed is False and url is None: + raise ValueError("No url found. 
Expecting url when embed=False")
+
+        if url is not None and embed is not True:
+            self.embed = False
+        else:
+            self.embed = True
+        self.autoplay = autoplay
+        self.element_id = element_id
+        super(Audio, self).__init__(data=data, url=url, filename=filename)
+
+        if self.data is not None and not isinstance(self.data, bytes):
+            if rate is None:
+                raise ValueError("rate must be specified when data is a numpy array or list of audio samples.")
+            self.data = Audio._make_wav(data, rate, normalize)
+
+    def reload(self):
+        """Reload the raw data from file or URL."""
+        import mimetypes
+        if self.embed:
+            super(Audio, self).reload()
+
+        if self.filename is not None:
+            self.mimetype = mimetypes.guess_type(self.filename)[0]
+        elif self.url is not None:
+            self.mimetype = mimetypes.guess_type(self.url)[0]
+        else:
+            self.mimetype = "audio/wav"
+
+    @staticmethod
+    def _make_wav(data, rate, normalize):
+        """ Transform a numpy array to a PCM bytestring """
+        from io import BytesIO
+        import wave
+
+        try:
+            scaled, nchan = Audio._validate_and_normalize_with_numpy(data, normalize)
+        except ImportError:
+            scaled, nchan = Audio._validate_and_normalize_without_numpy(data, normalize)
+
+        fp = BytesIO()
+        waveobj = wave.open(fp, mode='wb')
+        waveobj.setnchannels(nchan)
+        waveobj.setframerate(rate)
+        waveobj.setsampwidth(2)
+        waveobj.setcomptype('NONE', 'NONE')
+        waveobj.writeframes(scaled)
+        val = fp.getvalue()
+        waveobj.close()
+
+        return val
+
+    @staticmethod
+    def _validate_and_normalize_with_numpy(data, normalize) -> Tuple[bytes, int]:
+        import numpy as np
+
+        data = np.array(data, dtype=float)
+        if len(data.shape) == 1:
+            nchan = 1
+        elif len(data.shape) == 2:
+            # In wave files, channels are interleaved. E.g.,
+            # "L1R1L2R2..." for stereo. See
+            # http://msdn.microsoft.com/en-us/library/windows/hardware/dn653308(v=vs.85).aspx
+            # for channel ordering
+            nchan = data.shape[0]
+            data = data.T.ravel()
+        else:
+            raise ValueError('Array audio input must be a 1D or 2D array')
+
+        max_abs_value = np.max(np.abs(data))
+        normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize)
+        scaled = data / normalization_factor * 32767
+        # little-endian 16-bit signed integers, as required by the wave module
+        return scaled.astype("<h").tobytes(), nchan
+
+    @staticmethod
+    def _validate_and_normalize_without_numpy(data, normalize):
+        import array
+        import sys
+
+        data = array.array('f', data)
+
+        try:
+            max_abs_value = float(max([abs(x) for x in data]))
+        except TypeError as e:
+            raise TypeError('Only lists of mono audio are '
+                            'supported if numpy is not installed') from e
+
+        normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize)
+        scaled = array.array('h', [int(x / normalization_factor * 32767) for x in data])
+
+        if sys.byteorder == 'big':
+            scaled.byteswap()
+
+        nchan = 1
+        return scaled.tobytes(), nchan
+
+    @staticmethod
+    def _get_normalization_factor(max_abs_value, normalize):
+        if not normalize and max_abs_value > 1:
+            raise ValueError('Audio data must be between -1 and 1 when normalize=False.')
+        return max_abs_value if normalize else 1
+
+    def _data_and_metadata(self):
+        """shortcut for returning metadata with url information, if defined"""
+        md = {}
+        if self.url:
+            md['url'] = self.url
+        if md:
+            return self.data, md
+        else:
+            return self.data
+
+    def _repr_html_(self):
+        src = """
+                <audio {element_id} controls="controls" {autoplay}>
+                    <source src="{src}" type="{type}" />
+                    Your browser does not support the audio element.
+                </audio>
+              """
+        return src.format(src=self.src_attr(), type=self.mimetype, autoplay=self.autoplay_attr(),
+                          element_id=self.element_id_attr())
+
+    def src_attr(self):
+        import base64
+        if self.embed and (self.data is not None):
+            data = base64.b64encode(self.data).decode('ascii')
+            return """data:{type};base64,{base64}""".format(type=self.mimetype,
+                                                            base64=data)
+        elif self.url is not None:
+            return self.url
+        else:
+            return ""
+
+    def autoplay_attr(self):
+        if self.autoplay:
+            return 'autoplay="autoplay"'
+        else:
+            return ''
+
+    def element_id_attr(self):
+        if self.element_id:
+            return 'id="{element_id}"'.format(element_id=self.element_id)
+        else:
+            return ''
+
+
+class IFrame(object):
+    """
+    Generic class to embed an iframe in an IPython notebook
+    """
+
+    iframe = """
+        <iframe
+            width="{width}"
+            height="{height}"
+            src="{src}{params}"
+            frameborder="0"
+            allowfullscreen
+            {extras}
+        ></iframe>
+        """
+
+    def __init__(self, src, width, height, extras: Iterable[str] = None, **kwargs):
+        if extras is None:
+            extras = []
+
+        self.src = src
+        self.width = width
+        self.height = height
+        self.extras = extras
self.params = kwargs + + def _repr_html_(self): + """return the embed iframe""" + if self.params: + from urllib.parse import urlencode + params = "?" + urlencode(self.params) + else: + params = "" + return self.iframe.format( + src=self.src, + width=self.width, + height=self.height, + params=params, + extras=" ".join(self.extras), + ) + + +class YouTubeVideo(IFrame): + """Class for embedding a YouTube Video in an IPython session, based on its video id. + + e.g. to embed the video from https://www.youtube.com/watch?v=foo , you would + do:: + + vid = YouTubeVideo("foo") + display(vid) + + To start from 30 seconds:: + + vid = YouTubeVideo("abc", start=30) + display(vid) + + To calculate seconds from time as hours, minutes, seconds use + :class:`datetime.timedelta`:: + + start=int(timedelta(hours=1, minutes=46, seconds=40).total_seconds()) + + Other parameters can be provided as documented at + https://developers.google.com/youtube/player_parameters#Parameters + + When converting the notebook using nbconvert, a jpeg representation of the video + will be inserted in the document. + """ + + def __init__(self, id, width=400, height=300, allow_autoplay=False, **kwargs): + self.id=id + src = "https://www.youtube.com/embed/{0}".format(id) + if allow_autoplay: + extras = list(kwargs.get("extras", [])) + ['allow="autoplay"'] + kwargs.update(autoplay=1, extras=extras) + super(YouTubeVideo, self).__init__(src, width, height, **kwargs) + + def _repr_jpeg_(self): + # Deferred import + from urllib.request import urlopen + + try: + return urlopen("https://img.youtube.com/vi/{id}/hqdefault.jpg".format(id=self.id)).read() + except IOError: + return None + +class VimeoVideo(IFrame): + """ + Class for embedding a Vimeo video in an IPython session, based on its video id. + """ + + def __init__(self, id, width=400, height=300, **kwargs): + src="https://player.vimeo.com/video/{0}".format(id) + super(VimeoVideo, self).__init__(src, width, height, **kwargs) + +class ScribdDocument(IFrame): + """ + Class for embedding a Scribd document in an IPython session + + Use the start_page params to specify a starting point in the document + Use the view_mode params to specify display type one off scroll | slideshow | book + + e.g to Display Wes' foundational paper about PANDAS in book mode from page 3 + + ScribdDocument(71048089, width=800, height=400, start_page=3, view_mode="book") + """ + + def __init__(self, id, width=400, height=300, **kwargs): + src="https://www.scribd.com/embeds/{0}/content".format(id) + super(ScribdDocument, self).__init__(src, width, height, **kwargs) + +class FileLink(object): + """Class for embedding a local file link in an IPython session, based on path + + e.g. to embed a link that was generated in the IPython notebook as my/data.txt + + you would do:: + + local_file = FileLink("my/data.txt") + display(local_file) + + or in the HTML notebook, just:: + + FileLink("my/data.txt") + """ + + html_link_str = "%s" + + def __init__(self, + path, + url_prefix='', + result_html_prefix='', + result_html_suffix='
'): + """ + Parameters + ---------- + path : str + path to the file or directory that should be formatted + url_prefix : str + prefix to be prepended to all files to form a working link [default: + ''] + result_html_prefix : str + text to append to beginning to link [default: ''] + result_html_suffix : str + text to append at the end of link [default: '
'] + """ + if isdir(path): + raise ValueError("Cannot display a directory using FileLink. " + "Use FileLinks to display '%s'." % path) + self.path = fsdecode(path) + self.url_prefix = url_prefix + self.result_html_prefix = result_html_prefix + self.result_html_suffix = result_html_suffix + + def _format_path(self): + fp = ''.join([self.url_prefix, html_escape(self.path)]) + return ''.join([self.result_html_prefix, + self.html_link_str % \ + (fp, html_escape(self.path, quote=False)), + self.result_html_suffix]) + + def _repr_html_(self): + """return html link to file + """ + if not exists(self.path): + return ("Path (%s) doesn't exist. " + "It may still be in the process of " + "being generated, or you may have the " + "incorrect path." % self.path) + + return self._format_path() + + def __repr__(self): + """return absolute path to file + """ + return abspath(self.path) + +class FileLinks(FileLink): + """Class for embedding local file links in an IPython session, based on path + + e.g. to embed links to files that were generated in the IPython notebook + under ``my/data``, you would do:: + + local_files = FileLinks("my/data") + display(local_files) + + or in the HTML notebook, just:: + + FileLinks("my/data") + """ + def __init__(self, + path, + url_prefix='', + included_suffixes=None, + result_html_prefix='', + result_html_suffix='
', + notebook_display_formatter=None, + terminal_display_formatter=None, + recursive=True): + """ + See :class:`FileLink` for the ``path``, ``url_prefix``, + ``result_html_prefix`` and ``result_html_suffix`` parameters. + + included_suffixes : list + Filename suffixes to include when formatting output [default: include + all files] + + notebook_display_formatter : function + Used to format links for display in the notebook. See discussion of + formatter functions below. + + terminal_display_formatter : function + Used to format links for display in the terminal. See discussion of + formatter functions below. + + Formatter functions must be of the form:: + + f(dirname, fnames, included_suffixes) + + dirname : str + The name of a directory + fnames : list + The files in that directory + included_suffixes : list + The file suffixes that should be included in the output (passing None + meansto include all suffixes in the output in the built-in formatters) + recursive : boolean + Whether to recurse into subdirectories. Default is True. + + The function should return a list of lines that will be printed in the + notebook (if passing notebook_display_formatter) or the terminal (if + passing terminal_display_formatter). This function is iterated over for + each directory in self.path. Default formatters are in place, can be + passed here to support alternative formatting. + + """ + if isfile(path): + raise ValueError("Cannot display a file using FileLinks. " + "Use FileLink to display '%s'." % path) + self.included_suffixes = included_suffixes + # remove trailing slashes for more consistent output formatting + path = path.rstrip('/') + + self.path = path + self.url_prefix = url_prefix + self.result_html_prefix = result_html_prefix + self.result_html_suffix = result_html_suffix + + self.notebook_display_formatter = \ + notebook_display_formatter or self._get_notebook_display_formatter() + self.terminal_display_formatter = \ + terminal_display_formatter or self._get_terminal_display_formatter() + + self.recursive = recursive + + def _get_display_formatter( + self, dirname_output_format, fname_output_format, fp_format, fp_cleaner=None + ): + """generate built-in formatter function + + this is used to define both the notebook and terminal built-in + formatters as they only differ by some wrapper text for each entry + + dirname_output_format: string to use for formatting directory + names, dirname will be substituted for a single "%s" which + must appear in this string + fname_output_format: string to use for formatting file names, + if a single "%s" appears in the string, fname will be substituted + if two "%s" appear in the string, the path to fname will be + substituted for the first and fname will be substituted for the + second + fp_format: string to use for formatting filepaths, must contain + exactly two "%s" and the dirname will be substituted for the first + and fname will be substituted for the second + """ + def f(dirname, fnames, included_suffixes=None): + result = [] + # begin by figuring out which filenames, if any, + # are going to be displayed + display_fnames = [] + for fname in fnames: + if (isfile(join(dirname,fname)) and + (included_suffixes is None or + splitext(fname)[1] in included_suffixes)): + display_fnames.append(fname) + + if len(display_fnames) == 0: + # if there are no filenames to display, don't print anything + # (not even the directory name) + pass + else: + # otherwise print the formatted directory name followed by + # the formatted filenames + dirname_output_line = 
dirname_output_format % dirname + result.append(dirname_output_line) + for fname in display_fnames: + fp = fp_format % (dirname,fname) + if fp_cleaner is not None: + fp = fp_cleaner(fp) + try: + # output can include both a filepath and a filename... + fname_output_line = fname_output_format % (fp, fname) + except TypeError: + # ... or just a single filepath + fname_output_line = fname_output_format % fname + result.append(fname_output_line) + return result + return f + + def _get_notebook_display_formatter(self, + spacer="  "): + """ generate function to use for notebook formatting + """ + dirname_output_format = \ + self.result_html_prefix + "%s/" + self.result_html_suffix + fname_output_format = \ + self.result_html_prefix + spacer + self.html_link_str + self.result_html_suffix + fp_format = self.url_prefix + '%s/%s' + if sep == "\\": + # Working on a platform where the path separator is "\", so + # must convert these to "/" for generating a URI + def fp_cleaner(fp): + # Replace all occurrences of backslash ("\") with a forward + # slash ("/") - this is necessary on windows when a path is + # provided as input, but we must link to a URI + return fp.replace('\\','/') + else: + fp_cleaner = None + + return self._get_display_formatter(dirname_output_format, + fname_output_format, + fp_format, + fp_cleaner) + + def _get_terminal_display_formatter(self, + spacer=" "): + """ generate function to use for terminal formatting + """ + dirname_output_format = "%s/" + fname_output_format = spacer + "%s" + fp_format = '%s/%s' + + return self._get_display_formatter(dirname_output_format, + fname_output_format, + fp_format) + + def _format_path(self): + result_lines = [] + if self.recursive: + walked_dir = list(walk(self.path)) + else: + walked_dir = [next(walk(self.path))] + walked_dir.sort() + for dirname, subdirs, fnames in walked_dir: + result_lines += self.notebook_display_formatter(dirname, fnames, self.included_suffixes) + return '\n'.join(result_lines) + + def __repr__(self): + """return newline-separated absolute paths + """ + result_lines = [] + if self.recursive: + walked_dir = list(walk(self.path)) + else: + walked_dir = [next(walk(self.path))] + walked_dir.sort() + for dirname, subdirs, fnames in walked_dir: + result_lines += self.terminal_display_formatter(dirname, fnames, self.included_suffixes) + return '\n'.join(result_lines) + + +class Code(TextDisplayObject): + """Display syntax-highlighted source code. + + This uses Pygments to highlight the code for HTML and Latex output. + + Parameters + ---------- + data : str + The code as a string + url : str + A URL to fetch the code from + filename : str + A local filename to load the code from + language : str + The short name of a Pygments lexer to use for highlighting. + If not specified, it will guess the lexer based on the filename + or the code. 
Available lexers: http://pygments.org/docs/lexers/
+    """
+    def __init__(self, data=None, url=None, filename=None, language=None):
+        self.language = language
+        super().__init__(data=data, url=url, filename=filename)
+
+    def _get_lexer(self):
+        if self.language:
+            from pygments.lexers import get_lexer_by_name
+            return get_lexer_by_name(self.language)
+        elif self.filename:
+            from pygments.lexers import get_lexer_for_filename
+            return get_lexer_for_filename(self.filename)
+        else:
+            from pygments.lexers import guess_lexer
+            return guess_lexer(self.data)
+
+    def __repr__(self):
+        return self.data
+
+    def _repr_html_(self):
+        from pygments import highlight
+        from pygments.formatters import HtmlFormatter
+        fmt = HtmlFormatter()
+        style = '<style>{}</style>'.format(fmt.get_style_defs('.output_html'))
+        return style + highlight(self.data, self._get_lexer(), fmt)
+
+    def _repr_latex_(self):
+        from pygments import highlight
+        from pygments.formatters import LatexFormatter
+        return highlight(self.data, self._get_lexer(), LatexFormatter())
diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/editorhooks.py b/myenv/lib/python3.10/site-packages/IPython/lib/editorhooks.py
new file mode 100644
index 000000000..d8bd6ac81
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/lib/editorhooks.py
@@ -0,0 +1,127 @@
+""" 'editor' hooks for common editors that work well with ipython
+
+They should honor the line number argument, at least.
+
+Contributions are *very* welcome.
+"""
+
+import os
+import shlex
+import subprocess
+import sys
+
+from IPython import get_ipython
+from IPython.core.error import TryNext
+from IPython.utils import py3compat
+
+
+def install_editor(template, wait=False):
+    """Installs the editor that is called by IPython for the %edit magic.
+
+    This overrides the default editor, which is generally set by your EDITOR
+    environment variable or is notepad (windows) or vi (linux). By supplying a
+    template string `template`, you can control how the editor is invoked
+    by IPython -- e.g. the format in which it accepts command line options.
+
+    Parameters
+    ----------
+    template : basestring
+        template acts as a template for how your editor is invoked by
+        the shell. It should contain '{filename}', which will be replaced
+        on invocation with the file name, and '{line}', which will be
+        replaced with the line number (or 0) at which to open the file.
+    wait : bool
+        If `wait` is true, wait until the user presses enter before returning,
+        to facilitate non-blocking editors that exit immediately after
+        the call.
+    """
+
+    # not all editors support $line, so we'll leave out this check
+    # for substitution in ['$file', '$line']:
+    #     if not substitution in run_template:
+    #         raise ValueError(('run_template should contain %s'
+    #         ' for string substitution. You supplied "%s"' % (substitution,
+    #             run_template)))
+
+    def call_editor(self, filename, line=0):
+        if line is None:
+            line = 0
+        cmd = template.format(filename=shlex.quote(filename), line=line)
+        print(">", cmd)
+        # shlex.quote doesn't work right on Windows, but it does after splitting
+        if sys.platform.startswith('win'):
+            cmd = shlex.split(cmd)
+        proc = subprocess.Popen(cmd, shell=True)
+        if proc.wait() != 0:
+            raise TryNext()
+        if wait:
+            py3compat.input("Press Enter when done editing:")
+
+    get_ipython().set_hook('editor', call_editor)
+    get_ipython().editor = template
+
+
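+# Added example (not part of the original module): a hypothetical editor
+# whose CLI looks like `myeditor --line <n> <file>` could be registered with
+# install_editor in the same way as the helpers below:
+#
+#     def myeditor(exe=u'myeditor'):
+#         """ Hypothetical example editor """
+#         install_editor(exe + u' --line {line} {filename}')
+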
+# in these, exe is always the path/name of the executable. Useful
+# if you don't have the editor directory in your path
+def komodo(exe=u'komodo'):
+    """ Activestate Komodo [Edit] """
+    install_editor(exe + u' -l {line} {filename}', wait=True)
+
+
+def scite(exe=u"scite"):
+    """ SciTE or Sc1 """
+    install_editor(exe + u' {filename} -goto:{line}')
+
+
+def notepadplusplus(exe=u'notepad++'):
+    """ Notepad++ http://notepad-plus.sourceforge.net """
+    install_editor(exe + u' -n{line} {filename}')
+
+
+def jed(exe=u'jed'):
+    """ JED, the lightweight emacsish editor """
+    install_editor(exe + u' +{line} {filename}')
+
+
+def idle(exe=u'idle'):
+    """ Idle, the editor bundled with python
+
+    Parameters
+    ----------
+    exe : str, None
+        If None, should be pretty smart about finding the executable.
+    """
+    if exe is None:
+        import idlelib
+        p = os.path.dirname(idlelib.__file__)
+        # i'm not sure if this actually works. Is this idle.py script
+        # guaranteed to be executable?
+        exe = os.path.join(p, 'idle.py')
+    install_editor(exe + u' {filename}')
+
+
+def mate(exe=u'mate'):
+    """ TextMate, the missing editor"""
+    # wait=True is not required since we're using the -w flag to mate
+    install_editor(exe + u' -w -l {line} {filename}')
+
+
+# ##########################################
+# these are untested, report any problems
+# ##########################################
+
+
+def emacs(exe=u'emacs'):
+    install_editor(exe + u' +{line} {filename}')
+
+
+def gnuclient(exe=u'gnuclient'):
+    install_editor(exe + u' -nw +{line} {filename}')
+
+
+def crimson_editor(exe=u'cedt.exe'):
+    install_editor(exe + u' /L:{line} {filename}')
+
+
+def kate(exe=u'kate'):
+    install_editor(exe + u' -u -l {line} {filename}')
diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/guisupport.py b/myenv/lib/python3.10/site-packages/IPython/lib/guisupport.py
new file mode 100644
index 000000000..cfd325e9d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/lib/guisupport.py
@@ -0,0 +1,155 @@
+# coding: utf-8
+"""
+Support for creating GUI apps and starting event loops.
+
+IPython's GUI integration allows interactive plotting and GUI usage in IPython
+sessions. IPython has two different types of GUI integration:
+
+1. The terminal based IPython supports GUI event loops through Python's
+   PyOS_InputHook. PyOS_InputHook is a hook that Python calls periodically
+   whenever raw_input is waiting for a user to type code. We implement GUI
+   support in the terminal by setting PyOS_InputHook to a function that
+   iterates the event loop for a short while. It is important to note that
+   in this situation, the real GUI event loop is NOT run in the normal
+   manner, so you can't use the normal means to detect that it is running.
+2. In the two process IPython kernel/frontend, the GUI event loop is run in
+   the kernel. In this case, the event loop is run in the normal manner by
+   calling the function or method of the GUI toolkit that starts the event
+   loop.
+
+In addition to starting the GUI event loops in one of these two ways, IPython
+will *always* create an appropriate GUI application object when GUI
+integration is enabled.
+
+If you want your GUI apps to run in IPython you need to do two things:
+
+1. Test to see if there is already an existing main application object. If
+   there is, you should use it. If there is not an existing application object
+   you should create one.
+2. Test to see if the GUI event loop is running. If it is, you should not
+   start it. If the event loop is not running you may start it.
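+
+For example (an illustrative sketch added here, not part of the original
+docstring; ``MyFrame`` stands in for any wx.Frame subclass)::
+
+    app = get_app_wx()          # reuse an existing wx app, or create one
+    frame = MyFrame(None)
+    frame.Show()
+    start_event_loop_wx(app)    # enters MainLoop() only if not already running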
+
+This module contains functions for each toolkit that perform these things
+in a consistent manner. Because of how PyOS_InputHook runs the event loop,
+you cannot detect if the event loop is running using the traditional calls
+(such as ``wx.GetApp().IsMainLoopRunning()`` in wxPython). If PyOS_InputHook
+is set, these methods will return a false negative. That is, they will say
+the event loop is not running, when it actually is. To work around this
+limitation, we propose the following informal protocol:
+
+* Whenever someone starts the event loop, they *must* set the ``_in_event_loop``
+  attribute of the main application object to ``True``. This should be done
+  regardless of how the event loop is actually run.
+* Whenever someone stops the event loop, they *must* set the ``_in_event_loop``
+  attribute of the main application object to ``False``.
+* If you want to see if the event loop is running, you *must* use ``hasattr``
+  to see if the ``_in_event_loop`` attribute has been set. If it is set, you
+  *must* use its value. If it has not been set, you can query the toolkit
+  in the normal manner.
+* If you want GUI support and no one else has created an application or
+  started the event loop you *must* do this. We don't want projects to
+  attempt to defer these things to someone else if they themselves need it.
+
+The functions below implement this logic for each GUI toolkit. If you need
+to create custom application subclasses, you will likely have to modify this
+code for your own purposes. This code can be copied into your own project
+so you don't have to depend on IPython.
+
+"""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
+from IPython.core.getipython import get_ipython
+
+#-----------------------------------------------------------------------------
+# wx
+#-----------------------------------------------------------------------------
+
+def get_app_wx(*args, **kwargs):
+    """Create a new wx app or return an existing one."""
+    import wx
+    app = wx.GetApp()
+    if app is None:
+        if 'redirect' not in kwargs:
+            kwargs['redirect'] = False
+        app = wx.PySimpleApp(*args, **kwargs)
+    return app
+
+def is_event_loop_running_wx(app=None):
+    """Is the wx event loop running."""
+    # New way: check attribute on shell instance
+    ip = get_ipython()
+    if ip is not None:
+        if ip.active_eventloop and ip.active_eventloop == 'wx':
+            return True
+        # Fall through to checking the application, because Wx has a native way
+        # to check if the event loop is running, unlike Qt.
+ + # Old way: check Wx application + if app is None: + app = get_app_wx() + if hasattr(app, '_in_event_loop'): + return app._in_event_loop + else: + return app.IsMainLoopRunning() + +def start_event_loop_wx(app=None): + """Start the wx event loop in a consistent manner.""" + if app is None: + app = get_app_wx() + if not is_event_loop_running_wx(app): + app._in_event_loop = True + app.MainLoop() + app._in_event_loop = False + else: + app._in_event_loop = True + +#----------------------------------------------------------------------------- +# qt4 +#----------------------------------------------------------------------------- + +def get_app_qt4(*args, **kwargs): + """Create a new qt4 app or return an existing one.""" + from IPython.external.qt_for_kernel import QtGui + app = QtGui.QApplication.instance() + if app is None: + if not args: + args = ([''],) + app = QtGui.QApplication(*args, **kwargs) + return app + +def is_event_loop_running_qt4(app=None): + """Is the qt4 event loop running.""" + # New way: check attribute on shell instance + ip = get_ipython() + if ip is not None: + return ip.active_eventloop and ip.active_eventloop.startswith('qt') + + # Old way: check attribute on QApplication singleton + if app is None: + app = get_app_qt4(['']) + if hasattr(app, '_in_event_loop'): + return app._in_event_loop + else: + # Does qt4 provide a other way to detect this? + return False + +def start_event_loop_qt4(app=None): + """Start the qt4 event loop in a consistent manner.""" + if app is None: + app = get_app_qt4(['']) + if not is_event_loop_running_qt4(app): + app._in_event_loop = True + app.exec_() + app._in_event_loop = False + else: + app._in_event_loop = True + +#----------------------------------------------------------------------------- +# Tk +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# gtk +#----------------------------------------------------------------------------- diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/latextools.py b/myenv/lib/python3.10/site-packages/IPython/lib/latextools.py new file mode 100644 index 000000000..693060ba5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/latextools.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +"""Tools for handling LaTeX.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +from io import BytesIO, open +import os +import tempfile +import shutil +import subprocess +from base64 import encodebytes +import textwrap + +from pathlib import Path + +from IPython.utils.process import find_cmd, FindCmdError +from traitlets.config import get_config +from traitlets.config.configurable import SingletonConfigurable +from traitlets import List, Bool, Unicode +from IPython.utils.py3compat import cast_unicode + + +class LaTeXTool(SingletonConfigurable): + """An object to store configuration of the LaTeX tool.""" + def _config_default(self): + return get_config() + + backends = List( + Unicode(), ["matplotlib", "dvipng"], + help="Preferred backend to draw LaTeX math equations. " + "Backends in the list are checked one by one and the first " + "usable one is used. Note that `matplotlib` backend " + "is usable only for inline style equations. To draw " + "display style equations, `dvipng` backend must be specified. ", + # It is a List instead of Enum, to make configuration more + # flexible. 
For example, to use matplotlib mainly but dvipng + # for display style, the default ["matplotlib", "dvipng"] can + # be used. To NOT use dvipng so that other repr such as + # unicode pretty printing is used, you can use ["matplotlib"]. + ).tag(config=True) + + use_breqn = Bool( + True, + help="Use breqn.sty to automatically break long equations. " + "This configuration takes effect only for dvipng backend.", + ).tag(config=True) + + packages = List( + ['amsmath', 'amsthm', 'amssymb', 'bm'], + help="A list of packages to use for dvipng backend. " + "'breqn' will be automatically appended when use_breqn=True.", + ).tag(config=True) + + preamble = Unicode( + help="Additional preamble to use when generating LaTeX source " + "for dvipng backend.", + ).tag(config=True) + + +def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', + scale=1.0): + """Render a LaTeX string to PNG. + + Parameters + ---------- + s : str + The raw string containing valid inline LaTeX. + encode : bool, optional + Should the PNG data base64 encoded to make it JSON'able. + backend : {matplotlib, dvipng} + Backend for producing PNG data. + wrap : bool + If true, Automatically wrap `s` as a LaTeX equation. + color : string + Foreground color name among dvipsnames, e.g. 'Maroon' or on hex RGB + format, e.g. '#AA20FA'. + scale : float + Scale factor for the resulting PNG. + None is returned when the backend cannot be used. + + """ + s = cast_unicode(s) + allowed_backends = LaTeXTool.instance().backends + if backend is None: + backend = allowed_backends[0] + if backend not in allowed_backends: + return None + if backend == 'matplotlib': + f = latex_to_png_mpl + elif backend == 'dvipng': + f = latex_to_png_dvipng + if color.startswith('#'): + # Convert hex RGB color to LaTeX RGB color. 
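+            # Added note (illustrative, not in the original comment):
+            # e.g. '#AA20FA' becomes 'RGB 170 32 250' -- one decimal
+            # value per pair of hex digits.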
+ if len(color) == 7: + try: + color = "RGB {}".format(" ".join([str(int(x, 16)) for x in + textwrap.wrap(color[1:], 2)])) + except ValueError as e: + raise ValueError('Invalid color specification {}.'.format(color)) from e + else: + raise ValueError('Invalid color specification {}.'.format(color)) + else: + raise ValueError('No such backend {0}'.format(backend)) + bin_data = f(s, wrap, color, scale) + if encode and bin_data: + bin_data = encodebytes(bin_data) + return bin_data + + +def latex_to_png_mpl(s, wrap, color='Black', scale=1.0): + try: + from matplotlib import figure, font_manager, mathtext + from matplotlib.backends import backend_agg + from pyparsing import ParseFatalException + except ImportError: + return None + + # mpl mathtext doesn't support display math, force inline + s = s.replace('$$', '$') + if wrap: + s = u'${0}$'.format(s) + + try: + prop = font_manager.FontProperties(size=12) + dpi = 120 * scale + buffer = BytesIO() + + # Adapted from mathtext.math_to_image + parser = mathtext.MathTextParser("path") + width, height, depth, _, _ = parser.parse(s, dpi=72, prop=prop) + fig = figure.Figure(figsize=(width / 72, height / 72)) + fig.text(0, depth / height, s, fontproperties=prop, color=color) + backend_agg.FigureCanvasAgg(fig) + fig.savefig(buffer, dpi=dpi, format="png", transparent=True) + return buffer.getvalue() + except (ValueError, RuntimeError, ParseFatalException): + return None + + +def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0): + try: + find_cmd('latex') + find_cmd('dvipng') + except FindCmdError: + return None + + startupinfo = None + if os.name == "nt": + # prevent popup-windows + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + + try: + workdir = Path(tempfile.mkdtemp()) + tmpfile = "tmp.tex" + dvifile = "tmp.dvi" + outfile = "tmp.png" + + with workdir.joinpath(tmpfile).open("w", encoding="utf8") as f: + f.writelines(genelatex(s, wrap)) + + with open(os.devnull, 'wb') as devnull: + subprocess.check_call( + ["latex", "-halt-on-error", "-interaction", "batchmode", tmpfile], + cwd=workdir, + stdout=devnull, + stderr=devnull, + startupinfo=startupinfo, + ) + + resolution = round(150*scale) + subprocess.check_call( + [ + "dvipng", + "-T", + "tight", + "-D", + str(resolution), + "-z", + "9", + "-bg", + "Transparent", + "-o", + outfile, + dvifile, + "-fg", + color, + ], + cwd=workdir, + stdout=devnull, + stderr=devnull, + startupinfo=startupinfo, + ) + + with workdir.joinpath(outfile).open("rb") as f: + return f.read() + except subprocess.CalledProcessError: + return None + finally: + shutil.rmtree(workdir) + + +def kpsewhich(filename): + """Invoke kpsewhich command with an argument `filename`.""" + try: + find_cmd("kpsewhich") + proc = subprocess.Popen( + ["kpsewhich", filename], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (stdout, stderr) = proc.communicate() + return stdout.strip().decode('utf8', 'replace') + except FindCmdError: + pass + + +def genelatex(body, wrap): + """Generate LaTeX document for dvipng backend.""" + lt = LaTeXTool.instance() + breqn = wrap and lt.use_breqn and kpsewhich("breqn.sty") + yield r'\documentclass{article}' + packages = lt.packages + if breqn: + packages = packages + ['breqn'] + for pack in packages: + yield r'\usepackage{{{0}}}'.format(pack) + yield r'\pagestyle{empty}' + if lt.preamble: + yield lt.preamble + yield r'\begin{document}' + if breqn: + yield r'\begin{dmath*}' + yield body + yield r'\end{dmath*}' + elif wrap: + yield u'$${0}$$'.format(body) + else: + 
+        yield body
+    yield u'\\end{document}'
+
+
+_data_uri_template_png = u"""<img src="data:image/png;base64,%s" alt=%s />"""
+
+def latex_to_html(s, alt='image'):
+    """Render LaTeX to HTML with embedded PNG data using data URIs.
+
+    Parameters
+    ----------
+    s : str
+        The raw string containing valid inline LaTeX.
+    alt : str
+        The alt text to use for the HTML.
+    """
+    base64_data = latex_to_png(s, encode=True)
+    if base64_data:
+        return _data_uri_template_png % (base64_data.decode('ascii'), alt)
+
+
diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/lexers.py b/myenv/lib/python3.10/site-packages/IPython/lib/lexers.py
new file mode 100644
index 000000000..0c9b6e1bc
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/IPython/lib/lexers.py
@@ -0,0 +1,526 @@
+# -*- coding: utf-8 -*-
+"""
+Defines a variety of Pygments lexers for highlighting IPython code.
+
+This includes:
+
+    IPythonLexer, IPython3Lexer
+        Lexers for pure IPython (python + magic/shell commands)
+
+    IPythonPartialTracebackLexer, IPythonTracebackLexer
+        Supports 2.x and 3.x via keyword `python3`. The partial traceback
+        lexer reads everything but the Python code appearing in a traceback.
+        The full lexer combines the partial lexer with an IPython lexer.
+
+    IPythonConsoleLexer
+        A lexer for IPython console sessions, with support for tracebacks.
+
+    IPyLexer
+        A friendly lexer which examines the first line of text and from it,
+        decides whether to use an IPython lexer or an IPython console lexer.
+        This is probably the only lexer that needs to be explicitly added
+        to Pygments.
+
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, the IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
+#-----------------------------------------------------------------------------
+
+# Standard library
+import re
+
+# Third party
+from pygments.lexers import (
+    BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
+    Python3Lexer, TexLexer)
+from pygments.lexer import (
+    Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
+)
+from pygments.token import (
+    Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
+)
+from pygments.util import get_bool_opt
+
+# Local
+
+line_re = re.compile('.*?\n')
+
+__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
+           'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
+           'IPythonConsoleLexer', 'IPyLexer']
+
+
+def build_ipy_lexer(python3):
+    """Builds IPython lexers depending on the value of `python3`.
+
+    The lexer inherits from an appropriate Python lexer and then adds
+    information about IPython specific keywords (i.e. magic commands,
+    shell commands, etc.)
+
+    Parameters
+    ----------
+    python3 : bool
+        If `True`, then build an IPython lexer from a Python 3 lexer.
+
+    """
+    # It would be nice to have a single IPython lexer class which takes
+    # a boolean `python3`. But since there are two Python lexer classes,
+    # we will also have two IPython lexer classes.
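+    # (Added illustration, not in the original: build_ipy_lexer(python3=True)
+    # returns a class named 'IPython3', registered under the 'ipython3' alias,
+    # whose 'root' state extends Python3Lexer with the magic/shell rules below.)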
+ if python3: + PyLexer = Python3Lexer + name = 'IPython3' + aliases = ['ipython3'] + doc = """IPython3 Lexer""" + else: + PyLexer = PythonLexer + name = 'IPython' + aliases = ['ipython2', 'ipython'] + doc = """IPython Lexer""" + + ipython_tokens = [ + (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))), + (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), + (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))), + (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))), + (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))), + (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))), + (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))), + (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))), + (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))), + (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)), + (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))), + (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)), + (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)), + (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword, + using(BashLexer), Text)), + (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)), + (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), + (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)), + (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)), + (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)), + ] + + tokens = PyLexer.tokens.copy() + tokens['root'] = ipython_tokens + tokens['root'] + + attrs = {'name': name, 'aliases': aliases, 'filenames': [], + '__doc__': doc, 'tokens': tokens} + + return type(name, (PyLexer,), attrs) + + +IPython3Lexer = build_ipy_lexer(python3=True) +IPythonLexer = build_ipy_lexer(python3=False) + + +class IPythonPartialTracebackLexer(RegexLexer): + """ + Partial lexer for IPython tracebacks. + + Handles all the non-python output. + + """ + name = 'IPython Partial Traceback' + + tokens = { + 'root': [ + # Tracebacks for syntax errors have a different style. + # For both types of tracebacks, we mark the first line with + # Generic.Traceback. For syntax errors, we mark the filename + # as we mark the filenames for non-syntax tracebacks. + # + # These two regexps define how IPythonConsoleLexer finds a + # traceback. 
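+            # (Added illustration: a run of hyphens such as '--------\n'
+            # opens a non-syntax traceback, while a line like
+            # ' File "x.py", line 1' opens a syntax-error traceback.)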
+ # + ## Non-syntax traceback + (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)), + ## Syntax traceback + (r'^( File)(.*)(, line )(\d+\n)', + bygroups(Generic.Traceback, Name.Namespace, + Generic.Traceback, Literal.Number.Integer)), + + # (Exception Identifier)(Whitespace)(Traceback Message) + (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)', + bygroups(Name.Exception, Generic.Whitespace, Text)), + # (Module/Filename)(Text)(Callee)(Function Signature) + # Better options for callee and function signature? + (r'(.*)( in )(.*)(\(.*\)\n)', + bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)), + # Regular line: (Whitespace)(Line Number)(Python Code) + (r'(\s*?)(\d+)(.*?\n)', + bygroups(Generic.Whitespace, Literal.Number.Integer, Other)), + # Emphasized line: (Arrow)(Line Number)(Python Code) + # Using Exception token so arrow color matches the Exception. + (r'(-*>?\s?)(\d+)(.*?\n)', + bygroups(Name.Exception, Literal.Number.Integer, Other)), + # (Exception Identifier)(Message) + (r'(?u)(^[^\d\W]\w*)(:.*?\n)', + bygroups(Name.Exception, Text)), + # Tag everything else as Other, will be handled later. + (r'.*\n', Other), + ], + } + + +class IPythonTracebackLexer(DelegatingLexer): + """ + IPython traceback lexer. + + For doctests, the tracebacks can be snipped as much as desired with the + exception to the lines that designate a traceback. For non-syntax error + tracebacks, this is the line of hyphens. For syntax error tracebacks, + this is the line which lists the File and line number. + + """ + # The lexer inherits from DelegatingLexer. The "root" lexer is an + # appropriate IPython lexer, which depends on the value of the boolean + # `python3`. First, we parse with the partial IPython traceback lexer. + # Then, any code marked with the "Other" token is delegated to the root + # lexer. + # + name = 'IPython Traceback' + aliases = ['ipythontb'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipython3tb'] + else: + self.aliases = ['ipython2tb', 'ipythontb'] + + if self.python3: + IPyLexer = IPython3Lexer + else: + IPyLexer = IPythonLexer + + DelegatingLexer.__init__(self, IPyLexer, + IPythonPartialTracebackLexer, **options) + +class IPythonConsoleLexer(Lexer): + """ + An IPython console lexer for IPython code-blocks and doctests, such as: + + .. code-block:: rst + + .. code-block:: ipythonconsole + + In [1]: a = 'foo' + + In [2]: a + Out[2]: 'foo' + + In [3]: print(a) + foo + + + Support is also provided for IPython exceptions: + + .. code-block:: rst + + .. code-block:: ipythonconsole + + In [1]: raise Exception + Traceback (most recent call last): + ... + Exception + + """ + name = 'IPython console session' + aliases = ['ipythonconsole'] + mimetypes = ['text/x-ipython-console'] + + # The regexps used to determine what is input and what is output. + # The default prompts for IPython are: + # + # in = 'In [#]: ' + # continuation = ' .D.: ' + # template = 'Out[#]: ' + # + # Where '#' is the 'prompt number' or 'execution count' and 'D' + # D is a number of dots matching the width of the execution count + # + in1_regex = r'In \[[0-9]+\]: ' + in2_regex = r' \.\.+\.: ' + out_regex = r'Out\[[0-9]+\]: ' + + #: The regex to determine when a traceback starts. + ipytb_start = re.compile(r'^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)') + + def __init__(self, **options): + """Initialize the IPython console lexer. 
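+
+        All options are optional; for example (added illustration)::
+
+            lexer = IPythonConsoleLexer(python3=True)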
+ + Parameters + ---------- + python3 : bool + If `True`, then the console inputs are parsed using a Python 3 + lexer. Otherwise, they are parsed using a Python 2 lexer. + in1_regex : RegexObject + The compiled regular expression used to detect the start + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + in2_regex : RegexObject + The compiled regular expression used to detect the continuation + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + out_regex : RegexObject + The compiled regular expression used to detect outputs. If `None`, + then the default output prompt is assumed. + + """ + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipython3console'] + else: + self.aliases = ['ipython2console', 'ipythonconsole'] + + in1_regex = options.get('in1_regex', self.in1_regex) + in2_regex = options.get('in2_regex', self.in2_regex) + out_regex = options.get('out_regex', self.out_regex) + + # So that we can work with input and output prompts which have been + # rstrip'd (possibly by editors) we also need rstrip'd variants. If + # we do not do this, then such prompts will be tagged as 'output'. + # The reason can't just use the rstrip'd variants instead is because + # we want any whitespace associated with the prompt to be inserted + # with the token. This allows formatted code to be modified so as hide + # the appearance of prompts, with the whitespace included. One example + # use of this is in copybutton.js from the standard lib Python docs. + in1_regex_rstrip = in1_regex.rstrip() + '\n' + in2_regex_rstrip = in2_regex.rstrip() + '\n' + out_regex_rstrip = out_regex.rstrip() + '\n' + + # Compile and save them all. + attrs = ['in1_regex', 'in2_regex', 'out_regex', + 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip'] + for attr in attrs: + self.__setattr__(attr, re.compile(locals()[attr])) + + Lexer.__init__(self, **options) + + if self.python3: + pylexer = IPython3Lexer + tblexer = IPythonTracebackLexer + else: + pylexer = IPythonLexer + tblexer = IPythonTracebackLexer + + self.pylexer = pylexer(**options) + self.tblexer = tblexer(**options) + + self.reset() + + def reset(self): + self.mode = 'output' + self.index = 0 + self.buffer = u'' + self.insertions = [] + + def buffered_tokens(self): + """ + Generator of unprocessed tokens after doing insertions and before + changing to a new state. + + """ + if self.mode == 'output': + tokens = [(0, Generic.Output, self.buffer)] + elif self.mode == 'input': + tokens = self.pylexer.get_tokens_unprocessed(self.buffer) + else: # traceback + tokens = self.tblexer.get_tokens_unprocessed(self.buffer) + + for i, t, v in do_insertions(self.insertions, tokens): + # All token indexes are relative to the buffer. + yield self.index + i, t, v + + # Clear it all + self.index += len(self.buffer) + self.buffer = u'' + self.insertions = [] + + def get_mci(self, line): + """ + Parses the line and returns a 3-tuple: (mode, code, insertion). + + `mode` is the next mode (or state) of the lexer, and is always equal + to 'input', 'output', or 'tb'. + + `code` is a portion of the line that should be added to the buffer + corresponding to the next mode and eventually lexed by another lexer. + For example, `code` could be Python code if `mode` were 'input'. 
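+
+        For instance (an added illustration), the line ``"In [1]: x = 1\n"``
+        yields ``mode='input'``, ``code="x = 1\n"``, and an insertion holding
+        the ``"In [1]: "`` prompt.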
+ + `insertion` is a 3-tuple (index, token, text) representing an + unprocessed "token" that will be inserted into the stream of tokens + that are created from the buffer once we change modes. This is usually + the input or output prompt. + + In general, the next mode depends on current mode and on the contents + of `line`. + + """ + # To reduce the number of regex match checks, we have multiple + # 'if' blocks instead of 'if-elif' blocks. + + # Check for possible end of input + in2_match = self.in2_regex.match(line) + in2_match_rstrip = self.in2_regex_rstrip.match(line) + if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \ + in2_match_rstrip: + end_input = True + else: + end_input = False + if end_input and self.mode != 'tb': + # Only look for an end of input when not in tb mode. + # An ellipsis could appear within the traceback. + mode = 'output' + code = u'' + insertion = (0, Generic.Prompt, line) + return mode, code, insertion + + # Check for output prompt + out_match = self.out_regex.match(line) + out_match_rstrip = self.out_regex_rstrip.match(line) + if out_match or out_match_rstrip: + mode = 'output' + if out_match: + idx = out_match.end() + else: + idx = out_match_rstrip.end() + code = line[idx:] + # Use the 'heading' token for output. We cannot use Generic.Error + # since it would conflict with exceptions. + insertion = (0, Generic.Heading, line[:idx]) + return mode, code, insertion + + + # Check for input or continuation prompt (non stripped version) + in1_match = self.in1_regex.match(line) + if in1_match or (in2_match and self.mode != 'tb'): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. + mode = 'input' + if in1_match: + idx = in1_match.end() + else: # in2_match + idx = in2_match.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for input or continuation prompt (stripped version) + in1_match_rstrip = self.in1_regex_rstrip.match(line) + if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. + mode = 'input' + if in1_match_rstrip: + idx = in1_match_rstrip.end() + else: # in2_match + idx = in2_match_rstrip.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for traceback + if self.ipytb_start.match(line): + mode = 'tb' + code = line + insertion = None + return mode, code, insertion + + # All other stuff... + if self.mode in ('input', 'output'): + # We assume all other text is output. Multiline input that + # does not use the continuation marker cannot be detected. + # For example, the 3 in the following is clearly output: + # + # In [1]: print 3 + # 3 + # + # But the following second line is part of the input: + # + # In [2]: while True: + # print True + # + # In both cases, the 2nd line will be 'output'. + # + mode = 'output' + else: + mode = 'tb' + + code = line + insertion = None + + return mode, code, insertion + + def get_tokens_unprocessed(self, text): + self.reset() + for match in line_re.finditer(text): + line = match.group() + mode, code, insertion = self.get_mci(line) + + if mode != self.mode: + # Yield buffered tokens before transitioning to new mode. 
+ for token in self.buffered_tokens(): + yield token + self.mode = mode + + if insertion: + self.insertions.append((len(self.buffer), [insertion])) + self.buffer += code + + for token in self.buffered_tokens(): + yield token + +class IPyLexer(Lexer): + r""" + Primary lexer for all IPython-like code. + + This is a simple helper lexer. If the first line of the text begins with + "In \[[0-9]+\]:", then the entire text is parsed with an IPython console + lexer. If not, then the entire text is parsed with an IPython lexer. + + The goal is to reduce the number of lexers that are registered + with Pygments. + + """ + name = 'IPy session' + aliases = ['ipy'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + if self.python3: + self.aliases = ['ipy3'] + else: + self.aliases = ['ipy2', 'ipy'] + + Lexer.__init__(self, **options) + + self.IPythonLexer = IPythonLexer(**options) + self.IPythonConsoleLexer = IPythonConsoleLexer(**options) + + def get_tokens_unprocessed(self, text): + # Search for the input prompt anywhere...this allows code blocks to + # begin with comments as well. + if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL): + lex = self.IPythonConsoleLexer + else: + lex = self.IPythonLexer + for token in lex.get_tokens_unprocessed(text): + yield token + diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/pretty.py b/myenv/lib/python3.10/site-packages/IPython/lib/pretty.py new file mode 100644 index 000000000..f7feff9c3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/pretty.py @@ -0,0 +1,953 @@ +# -*- coding: utf-8 -*- +""" +Python advanced pretty printer. This pretty printer is intended to +replace the old `pprint` python module which does not allow developers +to provide their own pretty print callbacks. + +This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`. + + +Example Usage +------------- + +To directly print the representation of an object use `pprint`:: + + from pretty import pprint + pprint(complex_object) + +To get a string of the output use `pretty`:: + + from pretty import pretty + string = pretty(complex_object) + + +Extending +--------- + +The pretty library allows developers to add pretty printing rules for their +own objects. This process is straightforward. All you have to do is to +add a `_repr_pretty_` method to your object and call the methods on the +pretty printer passed:: + + class MyObject(object): + + def _repr_pretty_(self, p, cycle): + ... + +Here's an example for a class with a simple constructor:: + + class MySimpleObject: + + def __init__(self, a, b, *, c=None): + self.a = a + self.b = b + self.c = c + + def _repr_pretty_(self, p, cycle): + ctor = CallExpression.factory(self.__class__.__name__) + if self.c is None: + p.pretty(ctor(a, b)) + else: + p.pretty(ctor(a, b, c=c)) + +Here is an example implementation of a `_repr_pretty_` method for a list +subclass:: + + class MyList(list): + + def _repr_pretty_(self, p, cycle): + if cycle: + p.text('MyList(...)') + else: + with p.group(8, 'MyList([', '])'): + for idx, item in enumerate(self): + if idx: + p.text(',') + p.breakable() + p.pretty(item) + +The `cycle` parameter is `True` if pretty detected a cycle. You *have* to +react to that or the result is an infinite loop. `p.text()` just adds +non breaking text to the output, `p.breakable()` either adds a whitespace +or breaks here. If you pass it an argument it's used instead of the +default space. `p.pretty` prettyprints another object using the pretty print +method. 
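+
+For instance (an added illustration), ``pretty(MyList(range(5)))`` returns
+``'MyList([0, 1, 2, 3, 4])'``; once a line would exceed ``max_width``, the
+breakables wrap instead, aligned with the opening bracket.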
+ +The first parameter to the `group` function specifies the extra indentation +of the next line. In this example the next item will either be on the same +line (if the items are short enough) or aligned with the right edge of the +opening bracket of `MyList`. + +If you just want to indent something you can use the group function +without open / close parameters. You can also use this code:: + + with p.indent(2): + ... + +Inheritance diagram: + +.. inheritance-diagram:: IPython.lib.pretty + :parts: 3 + +:copyright: 2007 by Armin Ronacher. + Portions (c) 2009 by Robert Kern. +:license: BSD License. +""" + +from contextlib import contextmanager +import datetime +import os +import re +import sys +import types +from collections import deque +from inspect import signature +from io import StringIO +from warnings import warn + +from IPython.utils.decorators import undoc +from IPython.utils.py3compat import PYPY + +__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', + 'for_type', 'for_type_by_name', 'RawText', 'RawStringLiteral', 'CallExpression'] + + +MAX_SEQ_LENGTH = 1000 +_re_pattern_type = type(re.compile('')) + +def _safe_getattr(obj, attr, default=None): + """Safe version of getattr. + + Same as getattr, but will return ``default`` on any Exception, + rather than raising. + """ + try: + return getattr(obj, attr, default) + except Exception: + return default + +@undoc +class CUnicodeIO(StringIO): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + warn(("CUnicodeIO is deprecated since IPython 6.0. " + "Please use io.StringIO instead."), + DeprecationWarning, stacklevel=2) + +def _sorted_for_pprint(items): + """ + Sort the given items for pretty printing. Since some predictable + sorting is better than no sorting at all, we sort on the string + representation if normal sorting fails. + """ + items = list(items) + try: + return sorted(items) + except Exception: + try: + return sorted(items, key=str) + except Exception: + return items + +def pretty(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + """ + Pretty print the object's representation. + """ + stream = StringIO() + printer = RepresentationPrinter(stream, verbose, max_width, newline, max_seq_length=max_seq_length) + printer.pretty(obj) + printer.flush() + return stream.getvalue() + + +def pprint(obj, verbose=False, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + """ + Like `pretty` but print to stdout. + """ + printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline, max_seq_length=max_seq_length) + printer.pretty(obj) + printer.flush() + sys.stdout.write(newline) + sys.stdout.flush() + +class _PrettyPrinterBase(object): + + @contextmanager + def indent(self, indent): + """with statement support for indenting/dedenting.""" + self.indentation += indent + try: + yield + finally: + self.indentation -= indent + + @contextmanager + def group(self, indent=0, open='', close=''): + """like begin_group / end_group but for the with statement.""" + self.begin_group(indent, open) + try: + yield + finally: + self.end_group(indent, close) + +class PrettyPrinter(_PrettyPrinterBase): + """ + Baseclass for the `RepresentationPrinter` prettyprinter that is used to + generate pretty reprs of objects. Contrary to the `RepresentationPrinter` + this printer knows nothing about the default pprinters or the `_repr_pretty_` + callback method. 
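+
+    A minimal usage sketch (added for illustration, not part of the original
+    docstring)::
+
+        out = StringIO()
+        p = PrettyPrinter(out, max_width=20)
+        p.begin_group(2, 'f(')
+        p.text('alpha,')
+        p.breakable()
+        p.text('beta')
+        p.end_group(2, ')')
+        p.flush()
+        out.getvalue()  # 'f(alpha, beta)'; wraps once max_width is exceeded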
+ """ + + def __init__(self, output, max_width=79, newline='\n', max_seq_length=MAX_SEQ_LENGTH): + self.output = output + self.max_width = max_width + self.newline = newline + self.max_seq_length = max_seq_length + self.output_width = 0 + self.buffer_width = 0 + self.buffer = deque() + + root_group = Group(0) + self.group_stack = [root_group] + self.group_queue = GroupQueue(root_group) + self.indentation = 0 + + def _break_one_group(self, group): + while group.breakables: + x = self.buffer.popleft() + self.output_width = x.output(self.output, self.output_width) + self.buffer_width -= x.width + while self.buffer and isinstance(self.buffer[0], Text): + x = self.buffer.popleft() + self.output_width = x.output(self.output, self.output_width) + self.buffer_width -= x.width + + def _break_outer_groups(self): + while self.max_width < self.output_width + self.buffer_width: + group = self.group_queue.deq() + if not group: + return + self._break_one_group(group) + + def text(self, obj): + """Add literal text to the output.""" + width = len(obj) + if self.buffer: + text = self.buffer[-1] + if not isinstance(text, Text): + text = Text() + self.buffer.append(text) + text.add(obj, width) + self.buffer_width += width + self._break_outer_groups() + else: + self.output.write(obj) + self.output_width += width + + def breakable(self, sep=' '): + """ + Add a breakable separator to the output. This does not mean that it + will automatically break here. If no breaking on this position takes + place the `sep` is inserted which default to one space. + """ + width = len(sep) + group = self.group_stack[-1] + if group.want_break: + self.flush() + self.output.write(self.newline) + self.output.write(' ' * self.indentation) + self.output_width = self.indentation + self.buffer_width = 0 + else: + self.buffer.append(Breakable(sep, width, self)) + self.buffer_width += width + self._break_outer_groups() + + def break_(self): + """ + Explicitly insert a newline into the output, maintaining correct indentation. + """ + group = self.group_queue.deq() + if group: + self._break_one_group(group) + self.flush() + self.output.write(self.newline) + self.output.write(' ' * self.indentation) + self.output_width = self.indentation + self.buffer_width = 0 + + + def begin_group(self, indent=0, open=''): + """ + Begin a group. + The first parameter specifies the indentation for the next line (usually + the width of the opening text), the second the opening text. All + parameters are optional. + """ + if open: + self.text(open) + group = Group(self.group_stack[-1].depth + 1) + self.group_stack.append(group) + self.group_queue.enq(group) + self.indentation += indent + + def _enumerate(self, seq): + """like enumerate, but with an upper limit on the number of items""" + for idx, x in enumerate(seq): + if self.max_seq_length and idx >= self.max_seq_length: + self.text(',') + self.breakable() + self.text('...') + return + yield idx, x + + def end_group(self, dedent=0, close=''): + """End a group. See `begin_group` for more details.""" + self.indentation -= dedent + group = self.group_stack.pop() + if not group.breakables: + self.group_queue.remove(group) + if close: + self.text(close) + + def flush(self): + """Flush data that is left in the buffer.""" + for data in self.buffer: + self.output_width += data.output(self.output, self.output_width) + self.buffer.clear() + self.buffer_width = 0 + + +def _get_mro(obj_class): + """ Get a reasonable method resolution order of a class and its superclasses + for both old-style and new-style classes. 
+ """ + if not hasattr(obj_class, '__mro__'): + # Old-style class. Mix in object to make a fake new-style class. + try: + obj_class = type(obj_class.__name__, (obj_class, object), {}) + except TypeError: + # Old-style extension type that does not descend from object. + # FIXME: try to construct a more thorough MRO. + mro = [obj_class] + else: + mro = obj_class.__mro__[1:-1] + else: + mro = obj_class.__mro__ + return mro + + +class RepresentationPrinter(PrettyPrinter): + """ + Special pretty printer that has a `pretty` method that calls the pretty + printer for a python object. + + This class stores processing data on `self` so you must *never* use + this class in a threaded environment. Always lock it or reinstanciate + it. + + Instances also have a verbose flag callbacks can access to control their + output. For example the default instance repr prints all attributes and + methods that are not prefixed by an underscore if the printer is in + verbose mode. + """ + + def __init__(self, output, verbose=False, max_width=79, newline='\n', + singleton_pprinters=None, type_pprinters=None, deferred_pprinters=None, + max_seq_length=MAX_SEQ_LENGTH): + + PrettyPrinter.__init__(self, output, max_width, newline, max_seq_length=max_seq_length) + self.verbose = verbose + self.stack = [] + if singleton_pprinters is None: + singleton_pprinters = _singleton_pprinters.copy() + self.singleton_pprinters = singleton_pprinters + if type_pprinters is None: + type_pprinters = _type_pprinters.copy() + self.type_pprinters = type_pprinters + if deferred_pprinters is None: + deferred_pprinters = _deferred_type_pprinters.copy() + self.deferred_pprinters = deferred_pprinters + + def pretty(self, obj): + """Pretty print the given object.""" + obj_id = id(obj) + cycle = obj_id in self.stack + self.stack.append(obj_id) + self.begin_group() + try: + obj_class = _safe_getattr(obj, '__class__', None) or type(obj) + # First try to find registered singleton printers for the type. + try: + printer = self.singleton_pprinters[obj_id] + except (TypeError, KeyError): + pass + else: + return printer(obj, self, cycle) + # Next walk the mro and check for either: + # 1) a registered printer + # 2) a _repr_pretty_ method + for cls in _get_mro(obj_class): + if cls in self.type_pprinters: + # printer registered in self.type_pprinters + return self.type_pprinters[cls](obj, self, cycle) + else: + # deferred printer + printer = self._in_deferred_types(cls) + if printer is not None: + return printer(obj, self, cycle) + else: + # Finally look for special method names. + # Some objects automatically create any requested + # attribute. Try to ignore most of them by checking for + # callability. + if '_repr_pretty_' in cls.__dict__: + meth = cls._repr_pretty_ + if callable(meth): + return meth(obj, self, cycle) + if cls is not object \ + and callable(cls.__dict__.get('__repr__')): + return _repr_pprint(obj, self, cycle) + + return _default_pprint(obj, self, cycle) + finally: + self.end_group() + self.stack.pop() + + def _in_deferred_types(self, cls): + """ + Check if the given class is specified in the deferred type registry. + + Returns the printer from the registry if it exists, and None if the + class is not in the registry. Successful matches will be moved to the + regular type registry for future use. + """ + mod = _safe_getattr(cls, '__module__', None) + name = _safe_getattr(cls, '__name__', None) + key = (mod, name) + printer = None + if key in self.deferred_pprinters: + # Move the printer over to the regular registry. 
+ printer = self.deferred_pprinters.pop(key) + self.type_pprinters[cls] = printer + return printer + + +class Printable(object): + + def output(self, stream, output_width): + return output_width + + +class Text(Printable): + + def __init__(self): + self.objs = [] + self.width = 0 + + def output(self, stream, output_width): + for obj in self.objs: + stream.write(obj) + return output_width + self.width + + def add(self, obj, width): + self.objs.append(obj) + self.width += width + + +class Breakable(Printable): + + def __init__(self, seq, width, pretty): + self.obj = seq + self.width = width + self.pretty = pretty + self.indentation = pretty.indentation + self.group = pretty.group_stack[-1] + self.group.breakables.append(self) + + def output(self, stream, output_width): + self.group.breakables.popleft() + if self.group.want_break: + stream.write(self.pretty.newline) + stream.write(' ' * self.indentation) + return self.indentation + if not self.group.breakables: + self.pretty.group_queue.remove(self.group) + stream.write(self.obj) + return output_width + self.width + + +class Group(Printable): + + def __init__(self, depth): + self.depth = depth + self.breakables = deque() + self.want_break = False + + +class GroupQueue(object): + + def __init__(self, *groups): + self.queue = [] + for group in groups: + self.enq(group) + + def enq(self, group): + depth = group.depth + while depth > len(self.queue) - 1: + self.queue.append([]) + self.queue[depth].append(group) + + def deq(self): + for stack in self.queue: + for idx, group in enumerate(reversed(stack)): + if group.breakables: + del stack[idx] + group.want_break = True + return group + for group in stack: + group.want_break = True + del stack[:] + + def remove(self, group): + try: + self.queue[group.depth].remove(group) + except ValueError: + pass + + +class RawText: + """ Object such that ``p.pretty(RawText(value))`` is the same as ``p.text(value)``. + + An example usage of this would be to show a list as binary numbers, using + ``p.pretty([RawText(bin(i)) for i in integers])``. + """ + def __init__(self, value): + self.value = value + + def _repr_pretty_(self, p, cycle): + p.text(self.value) + + +class CallExpression: + """ Object which emits a line-wrapped call expression in the form `__name(*args, **kwargs)` """ + def __init__(__self, __name, *args, **kwargs): + # dunders are to avoid clashes with kwargs, as python's name manging + # will kick in. + self = __self + self.name = __name + self.args = args + self.kwargs = kwargs + + @classmethod + def factory(cls, name): + def inner(*args, **kwargs): + return cls(name, *args, **kwargs) + return inner + + def _repr_pretty_(self, p, cycle): + # dunders are to avoid clashes with kwargs, as python's name manging + # will kick in. 
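+        # Added illustration (not in the original): the factory-built
+        # expression CallExpression.factory('point')(1, y=2) pretty-prints
+        # as "point(1, y=2)", wrapping across lines when it grows too long.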
+ + started = False + def new_item(): + nonlocal started + if started: + p.text(",") + p.breakable() + started = True + + prefix = self.name + "(" + with p.group(len(prefix), prefix, ")"): + for arg in self.args: + new_item() + p.pretty(arg) + for arg_name, arg in self.kwargs.items(): + new_item() + arg_prefix = arg_name + "=" + with p.group(len(arg_prefix), arg_prefix): + p.pretty(arg) + + +class RawStringLiteral: + """ Wrapper that shows a string with a `r` prefix """ + def __init__(self, value): + self.value = value + + def _repr_pretty_(self, p, cycle): + base_repr = repr(self.value) + if base_repr[:1] in 'uU': + base_repr = base_repr[1:] + prefix = 'ur' + else: + prefix = 'r' + base_repr = prefix + base_repr.replace('\\\\', '\\') + p.text(base_repr) + + +def _default_pprint(obj, p, cycle): + """ + The default print function. Used if an object does not provide one and + it's none of the builtin objects. + """ + klass = _safe_getattr(obj, '__class__', None) or type(obj) + if _safe_getattr(klass, '__repr__', None) is not object.__repr__: + # A user-provided repr. Find newlines and replace them with p.break_() + _repr_pprint(obj, p, cycle) + return + p.begin_group(1, '<') + p.pretty(klass) + p.text(' at 0x%x' % id(obj)) + if cycle: + p.text(' ...') + elif p.verbose: + first = True + for key in dir(obj): + if not key.startswith('_'): + try: + value = getattr(obj, key) + except AttributeError: + continue + if isinstance(value, types.MethodType): + continue + if not first: + p.text(',') + p.breakable() + p.text(key) + p.text('=') + step = len(key) + 1 + p.indentation += step + p.pretty(value) + p.indentation -= step + first = False + p.end_group(1, '>') + + +def _seq_pprinter_factory(start, end): + """ + Factory that returns a pprint function useful for sequences. Used by + the default pprint for tuples and lists. + """ + def inner(obj, p, cycle): + if cycle: + return p.text(start + '...' + end) + step = len(start) + p.begin_group(step, start) + for idx, x in p._enumerate(obj): + if idx: + p.text(',') + p.breakable() + p.pretty(x) + if len(obj) == 1 and isinstance(obj, tuple): + # Special case for 1-item tuples. + p.text(',') + p.end_group(step, end) + return inner + + +def _set_pprinter_factory(start, end): + """ + Factory that returns a pprint function useful for sets and frozensets. + """ + def inner(obj, p, cycle): + if cycle: + return p.text(start + '...' + end) + if len(obj) == 0: + # Special case. + p.text(type(obj).__name__ + '()') + else: + step = len(start) + p.begin_group(step, start) + # Like dictionary keys, we will try to sort the items if there aren't too many + if not (p.max_seq_length and len(obj) >= p.max_seq_length): + items = _sorted_for_pprint(obj) + else: + items = obj + for idx, x in p._enumerate(items): + if idx: + p.text(',') + p.breakable() + p.pretty(x) + p.end_group(step, end) + return inner + + +def _dict_pprinter_factory(start, end): + """ + Factory that returns a pprint function used by the default pprint of + dicts and dict proxies. 
+    """
+    def inner(obj, p, cycle):
+        if cycle:
+            return p.text('{...}')
+        step = len(start)
+        p.begin_group(step, start)
+        keys = obj.keys()
+        for idx, key in p._enumerate(keys):
+            if idx:
+                p.text(',')
+                p.breakable()
+            p.pretty(key)
+            p.text(': ')
+            p.pretty(obj[key])
+        p.end_group(step, end)
+    return inner
+
+
+def _super_pprint(obj, p, cycle):
+    """The pprint for the super type."""
+    p.begin_group(8, '<super: ')
+    p.pretty(obj.__thisclass__)
+    p.text(',')
+    p.breakable()
+    if PYPY: # In PyPy, super() objects don't have __self__ attributes
+        dself = obj.__repr__.__self__
+        p.pretty(None if dself is obj else dself)
+    else:
+        p.pretty(obj.__self__)
+    p.end_group(8, '>')
+
+
+class _ReFlags:
+    def __init__(self, value):
+        self.value = value
+
+    def _repr_pretty_(self, p, cycle):
+        done_one = False
+        for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL',
+                     'UNICODE', 'VERBOSE', 'DEBUG'):
+            if self.value & getattr(re, flag):
+                if done_one:
+                    p.text('|')
+                p.text('re.' + flag)
+                done_one = True
+
+
+def _re_pattern_pprint(obj, p, cycle):
+    """The pprint function for regular expression patterns."""
+    re_compile = CallExpression.factory('re.compile')
+    if obj.flags:
+        p.pretty(re_compile(RawStringLiteral(obj.pattern), _ReFlags(obj.flags)))
+    else:
+        p.pretty(re_compile(RawStringLiteral(obj.pattern)))
+
+
+def _types_simplenamespace_pprint(obj, p, cycle):
+    """The pprint function for types.SimpleNamespace."""
+    namespace = CallExpression.factory('namespace')
+    if cycle:
+        p.pretty(namespace(RawText("...")))
+    else:
+        p.pretty(namespace(**obj.__dict__))
+
+
+def _type_pprint(obj, p, cycle):
+    """The pprint for classes and types."""
+    # Heap allocated types might not have the module attribute,
+    # and others may set it to None.
+
+    # Checks for a __repr__ override in the metaclass. Can't compare the
+    # type(obj).__repr__ directly because in PyPy the representation function
+    # inherited from type isn't the same type.__repr__
+    if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]:
+        _repr_pprint(obj, p, cycle)
+        return
+
+    mod = _safe_getattr(obj, '__module__', None)
+    try:
+        name = obj.__qualname__
+        if not isinstance(name, str):
+            # This can happen if the type implements __qualname__ as a property
+            # or other descriptor in Python 2.
+            raise Exception("Try __name__")
+    except Exception:
+        name = obj.__name__
+        if not isinstance(name, str):
+            name = '<unknown type>'
+
+    if mod in (None, '__builtin__', 'builtins', 'exceptions'):
+        p.text(name)
+    else:
+        p.text(mod + '.' + name)
+
+
+def _repr_pprint(obj, p, cycle):
+    """A pprint that just redirects to the normal repr function."""
+    # Find newlines and replace them with p.break_()
+    output = repr(obj)
+    lines = output.splitlines()
+    with p.group():
+        for idx, output_line in enumerate(lines):
+            if idx:
+                p.break_()
+            p.text(output_line)
+
+
+def _function_pprint(obj, p, cycle):
+    """Base pprint for all functions and builtin functions."""
+    name = _safe_getattr(obj, '__qualname__', obj.__name__)
+    mod = obj.__module__
+    if mod and mod not in ('__builtin__', 'builtins', 'exceptions'):
+        name = mod + '.' + name
+    try:
+        func_def = name + str(signature(obj))
+    except ValueError:
+        func_def = name
+    p.text('<function %s>' % func_def)
+
+
+def _exception_pprint(obj, p, cycle):
+    """Base pprint for all exceptions."""
+    name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__)
+    if obj.__class__.__module__ not in ('exceptions', 'builtins'):
+        name = '%s.%s' % (obj.__class__.__module__, name)
+
+    p.pretty(CallExpression(name, *getattr(obj, 'args', ())))
+
+
+#: the exception base
+try:
+    _exception_base = BaseException
+except NameError:
+    _exception_base = Exception
+
+
+#: printers for builtin types
+_type_pprinters = {
+    int: _repr_pprint,
+    float: _repr_pprint,
+    str: _repr_pprint,
+    tuple: _seq_pprinter_factory('(', ')'),
+    list: _seq_pprinter_factory('[', ']'),
+    dict: _dict_pprinter_factory('{', '}'),
+    set: _set_pprinter_factory('{', '}'),
+    frozenset: _set_pprinter_factory('frozenset({', '})'),
+    super: _super_pprint,
+    _re_pattern_type: _re_pattern_pprint,
+    type: _type_pprint,
+    types.FunctionType: _function_pprint,
+    types.BuiltinFunctionType: _function_pprint,
+    types.MethodType: _repr_pprint,
+    types.SimpleNamespace: _types_simplenamespace_pprint,
+    datetime.datetime: _repr_pprint,
+    datetime.timedelta: _repr_pprint,
+    _exception_base: _exception_pprint
+}
+
+# render os.environ like a dict
+_env_type = type(os.environ)
+# future-proof in case os.environ becomes a plain dict?
+if _env_type is not dict:
+    _type_pprinters[_env_type] = _dict_pprinter_factory('environ{', '}')
+
+try:
+    # In PyPy, types.DictProxyType is dict, setting the dictproxy printer
+    # using dict.setdefault avoids overwriting the dict printer
+    _type_pprinters.setdefault(types.DictProxyType,
+                               _dict_pprinter_factory('dict_proxy({', '})'))
+    _type_pprinters[types.ClassType] = _type_pprint
+    _type_pprinters[types.SliceType] = _repr_pprint
+except AttributeError: # Python 3
+    _type_pprinters[types.MappingProxyType] = \
+        _dict_pprinter_factory('mappingproxy({', '})')
+    _type_pprinters[slice] = _repr_pprint
+
+_type_pprinters[range] = _repr_pprint
+_type_pprinters[bytes] = _repr_pprint
+
+#: printers for types specified by name
+_deferred_type_pprinters = {
+}
+
+def for_type(typ, func):
+    """
+    Add a pretty printer for a given type.
+    """
+    oldfunc = _type_pprinters.get(typ, None)
+    if func is not None:
+        # To support easy restoration of old pprinters, we need to ignore Nones.
+        _type_pprinters[typ] = func
+    return oldfunc
+
+def for_type_by_name(type_module, type_name, func):
+    """
+    Add a pretty printer for a type specified by the module and name of a type
+    rather than the type object itself.
+    """
+    key = (type_module, type_name)
+    oldfunc = _deferred_type_pprinters.get(key, None)
+    if func is not None:
+        # To support easy restoration of old pprinters, we need to ignore Nones.
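+        # Illustrative (hypothetical) registration, not part of the original
+        # file: defer a printer for a type without importing its module:
+        #   for_type_by_name('decimal', 'Decimal',
+        #       lambda obj, p, cycle: p.text('Decimal(%r)' % str(obj)))
+        # The deferred printer is promoted into _type_pprinters the first
+        # time an instance of that type is actually printed.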
+ _deferred_type_pprinters[key] = func + return oldfunc + + +#: printers for the default singletons +_singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis, + NotImplemented]), _repr_pprint) + + +def _defaultdict_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + else: + p.pretty(cls_ctor(obj.default_factory, dict(obj))) + +def _ordereddict_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + elif len(obj): + p.pretty(cls_ctor(list(obj.items()))) + else: + p.pretty(cls_ctor()) + +def _deque_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + elif obj.maxlen is not None: + p.pretty(cls_ctor(list(obj), maxlen=obj.maxlen)) + else: + p.pretty(cls_ctor(list(obj))) + +def _counter_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + elif len(obj): + p.pretty(cls_ctor(dict(obj))) + else: + p.pretty(cls_ctor()) + + +def _userlist_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + else: + p.pretty(cls_ctor(obj.data)) + + +for_type_by_name('collections', 'defaultdict', _defaultdict_pprint) +for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint) +for_type_by_name('collections', 'deque', _deque_pprint) +for_type_by_name('collections', 'Counter', _counter_pprint) +for_type_by_name("collections", "UserList", _userlist_pprint) + +if __name__ == '__main__': + from random import randrange + class Foo(object): + def __init__(self): + self.foo = 1 + self.bar = re.compile(r'\s+') + self.blub = dict.fromkeys(range(30), randrange(1, 40)) + self.hehe = 23424.234234 + self.list = ["blub", "blah", self] + + def get_foo(self): + print("foo") + + pprint(Foo(), verbose=True) diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__init__.py b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..0e7a777ca Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_backgroundjobs.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_backgroundjobs.cpython-310.pyc new file mode 100644 index 000000000..f214c7b7d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_backgroundjobs.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_clipboard.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_clipboard.cpython-310.pyc new file mode 100644 index 000000000..fada0eb27 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_clipboard.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_deepreload.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_deepreload.cpython-310.pyc new file mode 100644 index 000000000..6b0d86edd Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_deepreload.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_display.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_display.cpython-310.pyc new file mode 100644 index 000000000..895c71afe Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_display.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_editorhooks.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_editorhooks.cpython-310.pyc new file mode 100644 index 000000000..a270f2939 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_editorhooks.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_imports.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_imports.cpython-310.pyc new file mode 100644 index 000000000..a17efe63e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_imports.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_latextools.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_latextools.cpython-310.pyc new file mode 100644 index 000000000..d0aa3dccc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_latextools.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_lexers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_lexers.cpython-310.pyc new file mode 100644 index 000000000..4db7fccd3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_lexers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pretty.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pretty.cpython-310.pyc new file mode 100644 index 000000000..c129bbdf0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pretty.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pygments.cpython-310.pyc b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pygments.cpython-310.pyc new file mode 100644 index 000000000..f69c0d122 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/__pycache__/test_pygments.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/test.wav b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test.wav new file mode 100644 index 000000000..aa74fc5bd Binary files /dev/null and b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test.wav differ diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_backgroundjobs.py b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_backgroundjobs.py new file mode 100644 index 000000000..fc76ff198 --- /dev/null +++ 
b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_backgroundjobs.py @@ -0,0 +1,85 @@ +"""Tests for pylab tools module. +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2011, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +# Stdlib imports +import time + +# Our own imports +from IPython.lib import backgroundjobs as bg + +#----------------------------------------------------------------------------- +# Globals and constants +#----------------------------------------------------------------------------- +t_short = 0.0001 # very short interval to wait on jobs + +#----------------------------------------------------------------------------- +# Local utilities +#----------------------------------------------------------------------------- +def sleeper(interval=t_short, *a, **kw): + args = dict(interval=interval, + other_args=a, + kw_args=kw) + time.sleep(interval) + return args + +def crasher(interval=t_short, *a, **kw): + time.sleep(interval) + raise Exception("Dead job with interval %s" % interval) + +#----------------------------------------------------------------------------- +# Classes and functions +#----------------------------------------------------------------------------- + +def test_result(): + """Test job submission and result retrieval""" + jobs = bg.BackgroundJobManager() + j = jobs.new(sleeper) + j.join() + assert j.result["interval"] == t_short + + +def test_flush(): + """Test job control""" + jobs = bg.BackgroundJobManager() + j = jobs.new(sleeper) + j.join() + assert len(jobs.completed) == 1 + assert len(jobs.dead) == 0 + jobs.flush() + assert len(jobs.completed) == 0 + + +def test_dead(): + """Test control of dead jobs""" + jobs = bg.BackgroundJobManager() + j = jobs.new(crasher) + j.join() + assert len(jobs.completed) == 0 + assert len(jobs.dead) == 1 + jobs.flush() + assert len(jobs.dead) == 0 + + +def test_longer(): + """Test control of longer-running jobs""" + jobs = bg.BackgroundJobManager() + # Sleep for long enough for the following two checks to still report the + # job as running, but not so long that it makes the test suite noticeably + # slower. + j = jobs.new(sleeper, 0.1) + assert len(jobs.running) == 1 + assert len(jobs.completed) == 0 + j.join() + assert len(jobs.running) == 0 + assert len(jobs.completed) == 1 diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_clipboard.py b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_clipboard.py new file mode 100644 index 000000000..6597c946b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_clipboard.py @@ -0,0 +1,20 @@ +from IPython.core.error import TryNext +from IPython.lib.clipboard import ClipboardEmpty +from IPython.testing.decorators import skip_if_no_x11 + + +@skip_if_no_x11 +def test_clipboard_get(): + # Smoketest for clipboard access - we can't easily guarantee that the + # clipboard is accessible and has something on it, but this tries to + # exercise the relevant code anyway. 
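+    # The call below goes through IPython's hook mechanism; roughly (an
+    # assumed summary, not part of the original file): registered clipboard
+    # backends are tried in priority order, and a backend raises TryNext to
+    # defer to the next one.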
+ try: + a = get_ipython().hooks.clipboard_get() + except ClipboardEmpty: + # Nothing in clipboard to get + pass + except TryNext: + # No clipboard access API available + pass + else: + assert isinstance(a, str) diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_deepreload.py b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_deepreload.py new file mode 100644 index 000000000..5da606cb8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_deepreload.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +"""Test suite for the deepreload module.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + +import types +from pathlib import Path + +import pytest +from tempfile import TemporaryDirectory + +from IPython.lib.deepreload import modules_reloading +from IPython.lib.deepreload import reload as dreload +from IPython.utils.syspathcontext import prepended_to_syspath + + +def test_deepreload(): + "Test that dreload does deep reloads and skips excluded modules." + with TemporaryDirectory() as tmpdir: + with prepended_to_syspath(tmpdir): + tmpdirpath = Path(tmpdir) + with open(tmpdirpath / "A.py", "w", encoding="utf-8") as f: + f.write("class Object:\n pass\nok = True\n") + with open(tmpdirpath / "B.py", "w", encoding="utf-8") as f: + f.write("import A\nassert A.ok, 'we are fine'\n") + import A + import B + + # Test that A is not reloaded. + obj = A.Object() + dreload(B, exclude=["A"]) + assert isinstance(obj, A.Object) is True + + # Test that an import failure will not blow-up us. + A.ok = False + with pytest.raises(AssertionError, match="we are fine"): + dreload(B, exclude=["A"]) + assert len(modules_reloading) == 0 + assert not A.ok + + # Test that A is reloaded. + obj = A.Object() + A.ok = False + dreload(B) + assert A.ok + assert isinstance(obj, A.Object) is False + + +def test_not_module(): + pytest.raises(TypeError, dreload, "modulename") + + +def test_not_in_sys_modules(): + fake_module = types.ModuleType("fake_module") + with pytest.raises(ImportError, match="not in sys.modules"): + dreload(fake_module) diff --git a/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_display.py b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_display.py new file mode 100644 index 000000000..f5ed34c91 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/IPython/lib/tests/test_display.py @@ -0,0 +1,272 @@ +"""Tests for IPython.lib.display. + +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. 
+#-----------------------------------------------------------------------------
+
+#-----------------------------------------------------------------------------
+# Imports
+#-----------------------------------------------------------------------------
+from tempfile import NamedTemporaryFile, mkdtemp
+from os.path import split, join as pjoin, dirname
+import pathlib
+from unittest import TestCase, mock
+import struct
+import wave
+from io import BytesIO
+
+# Third-party imports
+import pytest
+
+try:
+    import numpy
+except ImportError:
+    pass
+
+# Our own imports
+from IPython.lib import display
+
+from IPython.testing.decorators import skipif_not_numpy
+
+#-----------------------------------------------------------------------------
+# Classes and functions
+#-----------------------------------------------------------------------------
+
+#--------------------------
+# FileLink tests
+#--------------------------
+
+def test_instantiation_FileLink():
+    """FileLink: Test class can be instantiated"""
+    fl = display.FileLink('example.txt')
+    # TODO: remove if when only Python >= 3.6 is supported
+    fl = display.FileLink(pathlib.PurePath('example.txt'))
+
+def test_warning_on_non_existent_path_FileLink():
+    """FileLink: Calling _repr_html_ on non-existent files returns a warning"""
+    fl = display.FileLink("example.txt")
+    assert fl._repr_html_().startswith("Path (<tt>example.txt</tt>)")
+
+
+def test_existing_path_FileLink():
+    """FileLink: Calling _repr_html_ functions as expected on existing filepath
+    """
+    tf = NamedTemporaryFile()
+    fl = display.FileLink(tf.name)
+    actual = fl._repr_html_()
+    expected = "<a href='%s' target='_blank'>%s</a><br>" % (tf.name, tf.name)
+    assert actual == expected
+
+
+def test_existing_path_FileLink_repr():
+    """FileLink: Calling repr() functions as expected on existing filepath
+    """
+    tf = NamedTemporaryFile()
+    fl = display.FileLink(tf.name)
+    actual = repr(fl)
+    expected = tf.name
+    assert actual == expected
+
+
+def test_error_on_directory_to_FileLink():
+    """FileLink: Raises error when passed directory
+    """
+    td = mkdtemp()
+    pytest.raises(ValueError, display.FileLink, td)
+
+#--------------------------
+# FileLinks tests
+#--------------------------
+
+def test_instantiation_FileLinks():
+    """FileLinks: Test class can be instantiated
+    """
+    fls = display.FileLinks('example')
+
+def test_warning_on_non_existent_path_FileLinks():
+    """FileLinks: Calling _repr_html_ on non-existent files returns a warning"""
+    fls = display.FileLinks("example")
+    assert fls._repr_html_().startswith("Path (<tt>example</tt>)")
+
+
+def test_existing_path_FileLinks():
+    """FileLinks: Calling _repr_html_ functions as expected on existing dir
+    """
+    td = mkdtemp()
+    tf1 = NamedTemporaryFile(dir=td)
+    tf2 = NamedTemporaryFile(dir=td)
+    fl = display.FileLinks(td)
+    actual = fl._repr_html_()
+    actual = actual.split('\n')
+    actual.sort()
+    # the links should always have forward slashes, even on windows, so replace
+    # backslashes with forward slashes here
+    expected = ["%s/<br>" % td,
+                "&nbsp;&nbsp;<a href='%s' target='_blank'>%s</a><br>" %\
+                 (tf2.name.replace("\\","/"),split(tf2.name)[1]),
+                "&nbsp;&nbsp;<a href='%s' target='_blank'>%s</a><br>" %\
+                 (tf1.name.replace("\\","/"),split(tf1.name)[1])]
+    expected.sort()
+    # We compare the sorted list of links here as that's more reliable
+    assert actual == expected
+
+
+def test_existing_path_FileLinks_alt_formatter():
+    """FileLinks: Calling _repr_html_ functions as expected w/ an alt formatter
+    """
+    td = mkdtemp()
+    tf1 = NamedTemporaryFile(dir=td)
+    tf2 = NamedTemporaryFile(dir=td)
+    def fake_formatter(dirname,fnames,included_suffixes):
+        return ["hello","world"]
+    fl = display.FileLinks(td,notebook_display_formatter=fake_formatter)
+    actual = fl._repr_html_()
+    actual = actual.split('\n')
+    actual.sort()
+    expected = ["hello","world"]
+    expected.sort()
+    # We compare the sorted list of links here as that's more reliable
+    assert actual == expected
+
+
+def test_existing_path_FileLinks_repr():
+    """FileLinks: Calling repr() functions as expected on existing directory """
+    td = mkdtemp()
+    tf1 = NamedTemporaryFile(dir=td)
+    tf2 = NamedTemporaryFile(dir=td)
+    fl = display.FileLinks(td)
+    actual = repr(fl)
+    actual = actual.split('\n')
+    actual.sort()
+    expected = ['%s/' % td, '  %s' % split(tf1.name)[1],'  %s' % split(tf2.name)[1]]
+    expected.sort()
+    # We compare the sorted list of links here as that's more reliable
+    assert actual == expected
+
+
+def test_existing_path_FileLinks_repr_alt_formatter():
+    """FileLinks: Calling repr() functions as expected w/ alt formatter
+    """
+    td = mkdtemp()
+    tf1 = NamedTemporaryFile(dir=td)
+    tf2 = NamedTemporaryFile(dir=td)
+    def fake_formatter(dirname,fnames,included_suffixes):
+        return ["hello","world"]
+    fl = display.FileLinks(td,terminal_display_formatter=fake_formatter)
+    actual = repr(fl)
+    actual = actual.split('\n')
+    actual.sort()
+    expected = ["hello","world"]
+    expected.sort()
+    # We compare the sorted list of links here as that's more reliable
+    assert actual == expected
+
+
+def test_error_on_file_to_FileLinks():
+    """FileLinks: Raises error when passed file
+    """
+    td = mkdtemp()
+    tf1 = NamedTemporaryFile(dir=td)
+    pytest.raises(ValueError, display.FileLinks, tf1.name)
+
+
+def test_recursive_FileLinks():
+    """FileLinks: Does not recurse when recursive=False
+    """
+    td = mkdtemp()
+    tf = NamedTemporaryFile(dir=td)
+    subtd = mkdtemp(dir=td)
+    subtf = NamedTemporaryFile(dir=subtd)
+    fl = display.FileLinks(td)
+    actual = str(fl)
+    actual = actual.split('\n')
+    assert len(actual) == 4, actual
+    fl = display.FileLinks(td, recursive=False)
+    actual = str(fl)
+    actual = actual.split('\n')
+    assert len(actual) == 2, actual
+
+def test_audio_from_file():
+    path = pjoin(dirname(__file__), 'test.wav')
+    display.Audio(filename=path)
+
+class TestAudioDataWithNumpy(TestCase):
+
+    @skipif_not_numpy
+    def test_audio_from_numpy_array(self):
+        test_tone = get_test_tone()
+        audio = display.Audio(test_tone, rate=44100)
+        assert len(read_wav(audio.data)) == len(test_tone)
+
+    @skipif_not_numpy
+    def test_audio_from_list(self):
+        test_tone = get_test_tone()
+        audio = display.Audio(list(test_tone), rate=44100)
+        assert len(read_wav(audio.data)) == len(test_tone)
+
+    @skipif_not_numpy
+    def test_audio_from_numpy_array_without_rate_raises(self):
+        self.assertRaises(ValueError, display.Audio, get_test_tone())
+
+    @skipif_not_numpy
+    def test_audio_data_normalization(self):
+        expected_max_value = numpy.iinfo(numpy.int16).max
+        for scale in [1, 0.5, 2]:
+            audio = display.Audio(get_test_tone(scale), rate=44100)
+            actual_max_value = numpy.max(numpy.abs(read_wav(audio.data)))
+            assert actual_max_value == expected_max_value
+    @skipif_not_numpy
+    def test_audio_data_without_normalization(self):
+        max_int16 = numpy.iinfo(numpy.int16).max
+        for scale in [1, 0.5, 0.2]:
+            test_tone = get_test_tone(scale)
+            test_tone_max_abs = numpy.max(numpy.abs(test_tone))
+            expected_max_value = int(max_int16 * test_tone_max_abs)
+            audio = display.Audio(test_tone, rate=44100, normalize=False)
+            actual_max_value = numpy.max(numpy.abs(read_wav(audio.data)))
+            assert actual_max_value == expected_max_value
+
+    def test_audio_data_without_normalization_raises_for_invalid_data(self):
+        self.assertRaises(
+            ValueError,
+            lambda: display.Audio([1.001], rate=44100, normalize=False))
+        self.assertRaises(
+            ValueError,
+            lambda: display.Audio([-1.001], rate=44100, normalize=False))
+
+def simulate_numpy_not_installed():
+    try:
+        import numpy
+        return mock.patch('numpy.array', mock.MagicMock(side_effect=ImportError))
+    except ModuleNotFoundError:
+        return lambda x:x
+
+@simulate_numpy_not_installed()
+class TestAudioDataWithoutNumpy(TestAudioDataWithNumpy):
+    # All tests from `TestAudioDataWithNumpy` are inherited.
+
+    @skipif_not_numpy
+    def test_audio_raises_for_nested_list(self):
+        stereo_signal = [list(get_test_tone())] * 2
+        self.assertRaises(TypeError, lambda: display.Audio(stereo_signal, rate=44100))
+
+
+@skipif_not_numpy
+def get_test_tone(scale=1):
+    return numpy.sin(2 * numpy.pi * 440 * numpy.linspace(0, 1, 44100)) * scale
+
+def read_wav(data):
+    with wave.open(BytesIO(data)) as wave_file:
+        wave_data = wave_file.readframes(wave_file.getnframes())
+        num_samples = wave_file.getnframes() * wave_file.getnchannels()
+        return struct.unpack('<%sh' % num_samples, wave_data)
+
+def test_code_from_file():
+    c = display.Code(filename=__file__)
+    assert c._repr_html_().startswith('<style>')
+    </style>
+% endif
+% if full:
+</head>
+<body>
+% endif
+
+<h2>Error !</h2>
+<%
+    tback = RichTraceback(error=error, traceback=traceback)
+    src = tback.source
+    line = tback.lineno
+    if src:
+        lines = src.split('\n')
+    else:
+        lines = None
+%>
+<h3>${tback.errorname}: ${tback.message|h}</h3>
+
+% if lines:
+    <div class="sample">
+    <div class="nonhighlight">
+% for index in range(max(0, line-4),min(len(lines), line+5)):
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = index + 1
+    %>
+    % if index + 1 == line:
+    <%
+       if pygments_html_formatter:
+           old_cssclass = pygments_html_formatter.cssclass
+           pygments_html_formatter.cssclass = 'error ' + old_cssclass
+    %>
+        ${lines[index] | syntax_highlight(language='mako')}
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.cssclass = old_cssclass
+    %>
+    % else:
+        ${lines[index] | syntax_highlight(language='mako')}
+    % endif
+% endfor
+    </div>
+    </div>
+% endif
+
+<div class="stacktrace">
+% for (filename, lineno, function, line) in tback.reverse_traceback:
+    <div class="location">${filename}, line ${lineno}:</div>
+    <div class="nonhighlight">
+    <%
+       if pygments_html_formatter:
+           pygments_html_formatter.linenostart = lineno
+    %>
+      <div class="sourceline">${line | syntax_highlight(filename)}</div>
+    </div>
+% endfor
+</div>
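+## Typical usage of this template (an illustrative sketch, not part of the
+## original file): render it from an application's error path, e.g.
+##     try:
+##         template.render(**data)
+##     except Exception:
+##         print(mako.exceptions.html_error_template().render())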
+% if full:
+</body>
+</html>
+% endif
+""",
+        output_encoding=sys.getdefaultencoding(),
+        encoding_errors="htmlentityreplace",
+    )
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__init__.py b/myenv/lib/python3.10/site-packages/mako/ext/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..4c16ff68f
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/autohandler.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/autohandler.cpython-310.pyc
new file mode 100644
index 000000000..75a29bb47
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/autohandler.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/babelplugin.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/babelplugin.cpython-310.pyc
new file mode 100644
index 000000000..59b38a252
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/babelplugin.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/beaker_cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/beaker_cache.cpython-310.pyc
new file mode 100644
index 000000000..4c3c42fb6
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/beaker_cache.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/extract.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/extract.cpython-310.pyc
new file mode 100644
index 000000000..63b96ed06
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/extract.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/linguaplugin.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/linguaplugin.cpython-310.pyc
new file mode 100644
index 000000000..57cfd41da
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/linguaplugin.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/preprocessors.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/preprocessors.cpython-310.pyc
new file mode 100644
index 000000000..e5548040b
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/preprocessors.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-310.pyc
new file mode 100644
index 000000000..a22a0ef10
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/pygmentplugin.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/turbogears.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/turbogears.cpython-310.pyc
new file mode 100644
index 000000000..39b7c5b24
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/ext/__pycache__/turbogears.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/autohandler.py
b/myenv/lib/python3.10/site-packages/mako/ext/autohandler.py new file mode 100644 index 000000000..5bcfc2858 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/autohandler.py @@ -0,0 +1,70 @@ +# ext/autohandler.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""adds autohandler functionality to Mako templates. + +requires that the TemplateLookup class is used with templates. + +usage:: + + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context)}"/> + + +or with custom autohandler filename:: + + <%! + from mako.ext.autohandler import autohandler + %> + <%inherit file="${autohandler(template, context, name='somefilename')}"/> + +""" + +import os +import posixpath +import re + + +def autohandler(template, context, name="autohandler"): + lookup = context.lookup + _template_uri = template.module._template_uri + if not lookup.filesystem_checks: + try: + return lookup._uri_cache[(autohandler, _template_uri, name)] + except KeyError: + pass + + tokens = re.findall(r"([^/]+)", posixpath.dirname(_template_uri)) + [name] + while len(tokens): + path = "/" + "/".join(tokens) + if path != _template_uri and _file_exists(lookup, path): + if not lookup.filesystem_checks: + return lookup._uri_cache.setdefault( + (autohandler, _template_uri, name), path + ) + else: + return path + if len(tokens) == 1: + break + tokens[-2:] = [name] + + if not lookup.filesystem_checks: + return lookup._uri_cache.setdefault( + (autohandler, _template_uri, name), None + ) + else: + return None + + +def _file_exists(lookup, path): + psub = re.sub(r"^/", "", path) + for d in lookup.directories: + if os.path.exists(d + "/" + psub): + return True + else: + return False diff --git a/myenv/lib/python3.10/site-packages/mako/ext/babelplugin.py b/myenv/lib/python3.10/site-packages/mako/ext/babelplugin.py new file mode 100644 index 000000000..907d0b808 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/babelplugin.py @@ -0,0 +1,57 @@ +# ext/babelplugin.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""gettext message extraction via Babel: https://pypi.org/project/Babel/""" +from babel.messages.extract import extract_python + +from mako.ext.extract import MessageExtractor + + +class BabelMakoExtractor(MessageExtractor): + def __init__(self, keywords, comment_tags, options): + self.keywords = keywords + self.options = options + self.config = { + "comment-tags": " ".join(comment_tags), + "encoding": options.get( + "input_encoding", options.get("encoding", None) + ), + } + super().__init__() + + def __call__(self, fileobj): + return self.process_file(fileobj) + + def process_python(self, code, code_lineno, translator_strings): + comment_tags = self.config["comment-tags"] + for ( + lineno, + funcname, + messages, + python_translator_comments, + ) in extract_python(code, self.keywords, comment_tags, self.options): + yield ( + code_lineno + (lineno - 1), + funcname, + messages, + translator_strings + python_translator_comments, + ) + + +def extract(fileobj, keywords, comment_tags, options): + """Extract messages from Mako templates. + + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a list of keywords (i.e. 
function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :return: an iterator over ``(lineno, funcname, message, comments)`` tuples + :rtype: ``iterator`` + """ + extractor = BabelMakoExtractor(keywords, comment_tags, options) + yield from extractor(fileobj) diff --git a/myenv/lib/python3.10/site-packages/mako/ext/beaker_cache.py b/myenv/lib/python3.10/site-packages/mako/ext/beaker_cache.py new file mode 100644 index 000000000..9aa35b068 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/beaker_cache.py @@ -0,0 +1,82 @@ +# ext/beaker_cache.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provide a :class:`.CacheImpl` for the Beaker caching system.""" + +from mako import exceptions +from mako.cache import CacheImpl + +try: + from beaker import cache as beaker_cache +except: + has_beaker = False +else: + has_beaker = True + +_beaker_cache = None + + +class BeakerCacheImpl(CacheImpl): + + """A :class:`.CacheImpl` provided for the Beaker caching system. + + This plugin is used by default, based on the default + value of ``'beaker'`` for the ``cache_impl`` parameter of the + :class:`.Template` or :class:`.TemplateLookup` classes. + + """ + + def __init__(self, cache): + if not has_beaker: + raise exceptions.RuntimeException( + "Can't initialize Beaker plugin; Beaker is not installed." + ) + global _beaker_cache + if _beaker_cache is None: + if "manager" in cache.template.cache_args: + _beaker_cache = cache.template.cache_args["manager"] + else: + _beaker_cache = beaker_cache.CacheManager() + super().__init__(cache) + + def _get_cache(self, **kw): + expiretime = kw.pop("timeout", None) + if "dir" in kw: + kw["data_dir"] = kw.pop("dir") + elif self.cache.template.module_directory: + kw["data_dir"] = self.cache.template.module_directory + + if "manager" in kw: + kw.pop("manager") + + if kw.get("type") == "memcached": + kw["type"] = "ext:memcached" + + if "region" in kw: + region = kw.pop("region") + cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) + else: + cache = _beaker_cache.get_cache(self.cache.id, **kw) + cache_args = {"starttime": self.cache.starttime} + if expiretime: + cache_args["expiretime"] = expiretime + return cache, cache_args + + def get_or_create(self, key, creation_function, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, createfunc=creation_function, **kw) + + def put(self, key, value, **kw): + cache, kw = self._get_cache(**kw) + cache.put(key, value, **kw) + + def get(self, key, **kw): + cache, kw = self._get_cache(**kw) + return cache.get(key, **kw) + + def invalidate(self, key, **kw): + cache, kw = self._get_cache(**kw) + cache.remove_value(key, **kw) diff --git a/myenv/lib/python3.10/site-packages/mako/ext/extract.py b/myenv/lib/python3.10/site-packages/mako/ext/extract.py new file mode 100644 index 000000000..9d33ee184 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/extract.py @@ -0,0 +1,129 @@ +# ext/extract.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +from io import BytesIO +from io import StringIO +import re + +from mako import lexer +from mako import 
parsetree + + +class MessageExtractor: + use_bytes = True + + def process_file(self, fileobj): + template_node = lexer.Lexer( + fileobj.read(), input_encoding=self.config["encoding"] + ).parse() + yield from self.extract_nodes(template_node.get_children()) + + def extract_nodes(self, nodes): + translator_comments = [] + in_translator_comments = False + input_encoding = self.config["encoding"] or "ascii" + comment_tags = list( + filter(None, re.split(r"\s+", self.config["comment-tags"])) + ) + + for node in nodes: + child_nodes = None + if ( + in_translator_comments + and isinstance(node, parsetree.Text) + and not node.content.strip() + ): + # Ignore whitespace within translator comments + continue + + if isinstance(node, parsetree.Comment): + value = node.text.strip() + if in_translator_comments: + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.extend( + self._split_comment(node.lineno, value) + ) + continue + + if isinstance(node, parsetree.DefTag): + code = node.function_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.BlockTag): + code = node.body_decl.code + child_nodes = node.nodes + elif isinstance(node, parsetree.CallTag): + code = node.code.code + child_nodes = node.nodes + elif isinstance(node, parsetree.PageTag): + code = node.body_decl.code + elif isinstance(node, parsetree.CallNamespaceTag): + code = node.expression + child_nodes = node.nodes + elif isinstance(node, parsetree.ControlLine): + if node.isend: + in_translator_comments = False + continue + code = node.text + elif isinstance(node, parsetree.Code): + in_translator_comments = False + code = node.code.code + elif isinstance(node, parsetree.Expression): + code = node.code.code + else: + continue + + # Comments don't apply unless they immediately precede the message + if ( + translator_comments + and translator_comments[-1][0] < node.lineno - 1 + ): + translator_comments = [] + + translator_strings = [ + comment[1] for comment in translator_comments + ] + + if isinstance(code, str) and self.use_bytes: + code = code.encode(input_encoding, "backslashreplace") + + used_translator_comments = False + # We add extra newline to work around a pybabel bug + # (see python-babel/babel#274, parse_encoding dies if the first + # input string of the input is non-ascii) + # Also, because we added it, we have to subtract one from + # node.lineno + if self.use_bytes: + code = BytesIO(b"\n" + code) + else: + code = StringIO("\n" + code) + + for message in self.process_python( + code, node.lineno - 1, translator_strings + ): + yield message + used_translator_comments = True + + if used_translator_comments: + translator_comments = [] + in_translator_comments = False + + if child_nodes: + yield from self.extract_nodes(child_nodes) + + @staticmethod + def _split_comment(lineno, comment): + """Return the multiline comment at lineno split into a list of + comment line numbers and the accompanying comment line""" + return [ + (lineno + index, line) + for index, line in enumerate(comment.splitlines()) + ] diff --git a/myenv/lib/python3.10/site-packages/mako/ext/linguaplugin.py b/myenv/lib/python3.10/site-packages/mako/ext/linguaplugin.py new file mode 100644 index 000000000..efb04c70a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/linguaplugin.py @@ -0,0 +1,57 @@ +# ext/linguaplugin.py +# Copyright 2006-2022 the Mako authors and contributors +# +# 
This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +import contextlib +import io + +from lingua.extractors import Extractor +from lingua.extractors import get_extractor +from lingua.extractors import Message + +from mako.ext.extract import MessageExtractor + + +class LinguaMakoExtractor(Extractor, MessageExtractor): + """Mako templates""" + + use_bytes = False + extensions = [".mako"] + default_config = {"encoding": "utf-8", "comment-tags": ""} + + def __call__(self, filename, options, fileobj=None): + self.options = options + self.filename = filename + self.python_extractor = get_extractor("x.py") + if fileobj is None: + ctx = open(filename, "r") + else: + ctx = contextlib.nullcontext(fileobj) + with ctx as file_: + yield from self.process_file(file_) + + def process_python(self, code, code_lineno, translator_strings): + source = code.getvalue().strip() + if source.endswith(":"): + if source in ("try:", "else:") or source.startswith("except"): + source = "" # Ignore try/except and else + elif source.startswith("elif"): + source = source[2:] # Replace "elif" with "if" + source += "pass" + code = io.StringIO(source) + for msg in self.python_extractor( + self.filename, self.options, code, code_lineno - 1 + ): + if translator_strings: + msg = Message( + msg.msgctxt, + msg.msgid, + msg.msgid_plural, + msg.flags, + " ".join(translator_strings + [msg.comment]), + msg.tcomment, + msg.location, + ) + yield msg diff --git a/myenv/lib/python3.10/site-packages/mako/ext/preprocessors.py b/myenv/lib/python3.10/site-packages/mako/ext/preprocessors.py new file mode 100644 index 000000000..80403ecda --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/ext/preprocessors.py @@ -0,0 +1,20 @@ +# ext/preprocessors.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""preprocessing functions, used with the 'preprocessor' +argument on Template, TemplateLookup""" + +import re + + +def convert_comments(text): + """preprocess old style comments. 
+
+    example:
+
+    from mako.ext.preprocessors import convert_comments
+    t = Template(..., preprocessor=convert_comments)"""
+    return re.sub(r"(?<=\n)\s*#[^#]", "##", text)
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py b/myenv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py
new file mode 100644
index 000000000..9acbf4cd6
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/ext/pygmentplugin.py
@@ -0,0 +1,150 @@
+# ext/pygmentplugin.py
+# Copyright 2006-2022 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexer import bygroups
+from pygments.lexer import DelegatingLexer
+from pygments.lexer import include
+from pygments.lexer import RegexLexer
+from pygments.lexer import using
+from pygments.lexers.agile import Python3Lexer
+from pygments.lexers.agile import PythonLexer
+from pygments.lexers.web import CssLexer
+from pygments.lexers.web import HtmlLexer
+from pygments.lexers.web import JavascriptLexer
+from pygments.lexers.web import XmlLexer
+from pygments.token import Comment
+from pygments.token import Keyword
+from pygments.token import Name
+from pygments.token import Operator
+from pygments.token import Other
+from pygments.token import String
+from pygments.token import Text
+
+
+class MakoLexer(RegexLexer):
+    name = "Mako"
+    aliases = ["mako"]
+    filenames = ["*.mao"]
+
+    tokens = {
+        "root": [
+            (
+                r"(\s*)(\%)(\s*end(?:\w+))(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Keyword, Other),
+            ),
+            (
+                r"(\s*)(\%(?!%))([^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, using(PythonLexer), Other),
+            ),
+            (
+                r"(\s*)(##[^\n]*)(\n|\Z)",
+                bygroups(Text, Comment.Preproc, Other),
+            ),
+            (r"""(?s)<%doc>.*?</%doc>""", Comment.Preproc),
+            (
+                r"(<%)([\w\.\:]+)",
+                bygroups(Comment.Preproc, Name.Builtin),
+                "tag",
+            ),
+            (
+                r"(</%)([\w\.\:]+)(>)",
+                bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc),
+            ),
+            (r"<%(?=([\w\.\:]+))", Comment.Preproc, "ondeftags"),
+            (
+                r"(?s)(<%(?:!?))(.*?)(%>)",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
+            (
+                r"(\$\{)(.*?)(\})",
+                bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc),
+            ),
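+            # The catch-all rule below consumes plain template text up to the
+            # next Mako construct (control line, comment, <%/</% tag, ${...}
+            # substitution, escaped newline, or end of input) without
+            # consuming the construct itself.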
+            (
+                r"""(?sx)
+                (.+?)                # anything, followed by:
+                (?:
+                 (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
+                 (?=\#\*) |          # multiline comment
+                 (?=</?%) |          # a python block
+                                     # call start or end
+                 (?=\$\{) |          # a substitution
+                 (?<=\n)(?=\s*%) |
+                                     # - don't consume
+                 (\\\n) |            # an escaped newline
+                 \Z                  # end of string
+                )
+                """,
+                bygroups(Other, Operator),
+            ),
+            (r"\s+", Text),
+        ],
+        "ondeftags": [
+            (r"<%", Comment.Preproc),
+            (r"(?<=<%)(include|inherit|namespace|page)", Name.Builtin),
+            include("tag"),
+        ],
+        "tag": [
+            (
+                r'((?:\w+)\s*=)(\s*)(".*?")',
+                bygroups(Name.Attribute, Text, String),
+            ),
+            (r"/?\s*>", Comment.Preproc, "#pop"),
+            (r"\s+", Text),
+        ],
+        "attr": [
+            ('".*?"', String, "#pop"),
+            ("'.*?'", String, "#pop"),
+            (r"[^\s>]+", String, "#pop"),
+        ],
+    }
+
+
+class MakoHtmlLexer(DelegatingLexer):
+    name = "HTML+Mako"
+    aliases = ["html+mako"]
+
+    def __init__(self, **options):
+        super().__init__(HtmlLexer, MakoLexer, **options)
+
+
+class MakoXmlLexer(DelegatingLexer):
+    name = "XML+Mako"
+    aliases = ["xml+mako"]
+
+    def __init__(self, **options):
+        super().__init__(XmlLexer, MakoLexer, **options)
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+    name = "JavaScript+Mako"
+    aliases = ["js+mako", "javascript+mako"]
+
+    def __init__(self, **options):
+        super().__init__(JavascriptLexer, MakoLexer, **options)
+
+
+class MakoCssLexer(DelegatingLexer):
+    name = "CSS+Mako"
+    aliases = ["css+mako"]
+
+    def __init__(self, **options):
+        super().__init__(CssLexer, MakoLexer, **options)
+
+
+pygments_html_formatter = HtmlFormatter(
+    cssclass="syntax-highlighted", linenos=True
+)
+
+
+def syntax_highlight(filename="", language=None):
+    mako_lexer = MakoLexer()
+    python_lexer = Python3Lexer()
+    if filename.startswith("memory:") or language == "mako":
+        return lambda string: highlight(
+            string, mako_lexer, pygments_html_formatter
+        )
+    return lambda string: highlight(
+        string, python_lexer, pygments_html_formatter
+    )
diff --git a/myenv/lib/python3.10/site-packages/mako/ext/turbogears.py b/myenv/lib/python3.10/site-packages/mako/ext/turbogears.py
new file mode 100644
index 000000000..2ebf07468
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/ext/turbogears.py
@@ -0,0 +1,61 @@
+# ext/turbogears.py
+# Copyright 2006-2022 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from mako import compat
+from mako.lookup import TemplateLookup
+from mako.template import Template
+
+
+class TGPlugin:
+
+    """TurboGears compatible Template Plugin."""
+
+    def __init__(self, extra_vars_func=None, options=None, extension="mak"):
+        self.extra_vars_func = extra_vars_func
+        self.extension = extension
+        if not options:
+            options = {}
+
+        # Pull the options out and initialize the lookup
+        lookup_options = {}
+        for k, v in options.items():
+            if k.startswith("mako."):
+                lookup_options[k[5:]] = v
+            elif k in ["directories", "filesystem_checks", "module_directory"]:
+                lookup_options[k] = v
+        self.lookup = TemplateLookup(**lookup_options)
+
+        self.tmpl_options = {}
+        # transfer lookup args to template args, based on those available
+        # in getargspec
+        for kw in compat.inspect_getargspec(Template.__init__)[0]:
+            if kw in lookup_options:
+                self.tmpl_options[kw] = lookup_options[kw]
+
+    def load_template(self, templatename, template_string=None):
+        """Loads a template from a file or a string"""
+        if template_string is not None:
+            return Template(template_string, **self.tmpl_options)
+        # Translate TG dot notation to normal / template path
+        if "/" not in templatename:
+            templatename = (
+                "/" + templatename.replace(".", "/") + "." + self.extension
+            )
+
+        # Lookup template
+        return self.lookup.get_template(templatename)
+
+    def render(
+        self, info, format="html", fragment=False, template=None  # noqa
+    ):
+        if isinstance(template, str):
+            template = self.load_template(template)
+
+        # Load extra vars func if provided
+        if self.extra_vars_func:
+            info.update(self.extra_vars_func())
+
+        return template.render(**info)
diff --git a/myenv/lib/python3.10/site-packages/mako/filters.py b/myenv/lib/python3.10/site-packages/mako/filters.py
new file mode 100644
index 000000000..af202f3f5
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/filters.py
@@ -0,0 +1,163 @@
+# mako/filters.py
+# Copyright 2006-2022 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import codecs
+from html.entities import codepoint2name
+from html.entities import name2codepoint
+import re
+from urllib.parse import quote_plus
+
+import markupsafe
+
+html_escape = markupsafe.escape
+
+xml_escapes = {
+    "&": "&amp;",
+    ">": "&gt;",
+    "<": "&lt;",
+    '"': "&#34;",  # also &quot; in html-only
+    "'": "&#39;",  # also &apos; in html-only
+}
+
+
+def xml_escape(string):
+    return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
+
+
+def url_escape(string):
+    # convert into a list of octets
+    string = string.encode("utf8")
+    return quote_plus(string)
+
+
+def trim(string):
+    return string.strip()
+
+
+class Decode:
+    def __getattr__(self, key):
+        def decode(x):
+            if isinstance(x, str):
+                return x
+            elif not isinstance(x, bytes):
+                return decode(str(x))
+            else:
+                return str(x, encoding=key)
+
+        return decode
+
+
+decode = Decode()
+
+
+class XMLEntityEscaper:
+    def __init__(self, codepoint2name, name2codepoint):
+        self.codepoint2entity = {
+            c: str("&%s;" % n) for c, n in codepoint2name.items()
+        }
+        self.name2codepoint = name2codepoint
+
+    def escape_entities(self, text):
+        """Replace characters with their character entity references.
+
+        Only characters corresponding to a named entity are replaced.
+        """
+        return str(text).translate(self.codepoint2entity)
+
+    def __escape(self, m):
+        codepoint = ord(m.group())
+        try:
+            return self.codepoint2entity[codepoint]
+        except (KeyError, IndexError):
+            return "&#x%X;" % codepoint
+
+    __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
+
+    def escape(self, text):
+        """Replace characters with their character references.
+
+        Replace characters by their named entity references.
+        Non-ASCII characters, if they do not have a named entity reference,
+        are replaced by numerical character references.
+
+        The return value is guaranteed to be ASCII.
+        """
+        return self.__escapable.sub(self.__escape, str(text)).encode("ascii")
+
+    # XXX: This regexp will not match all valid XML entity names__.
+    # (It punts on details involving involving CombiningChars and Extenders.)
+    #
+    # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
+    __characterrefs = re.compile(
+        r"""& (?:
+              \#(\d+)
+            | \#x([\da-f]+)
+            | ( (?!\d) [:\w] [-.:\w]+ )
+            ) ;""",
+        re.X | re.UNICODE,
+    )
+
+    def __unescape(self, m):
+        dval, hval, name = m.groups()
+        if dval:
+            codepoint = int(dval)
+        elif hval:
+            codepoint = int(hval, 16)
+        else:
+            codepoint = self.name2codepoint.get(name, 0xFFFD)
+            # U+FFFD = "REPLACEMENT CHARACTER"
+        if codepoint < 128:
+            return chr(codepoint)
+        return chr(codepoint)
+
+    def unescape(self, text):
+        """Unescape character references.
+
+        All character references (both entity references and numerical
+        character references) are unescaped.
+        """
+        return self.__characterrefs.sub(self.__unescape, text)
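+# Illustrative round trip (a hypothetical session, not part of the original
+# file):
+#   _html_entities_escaper.escape('caf\xe9 & "tags"')
+#       -> b'caf&eacute; &amp; &quot;tags&quot;'
+#   _html_entities_escaper.unescape('caf&eacute;')  -> 'café'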
+
+
+_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
+
+html_entities_escape = _html_entities_escaper.escape_entities
+html_entities_unescape = _html_entities_escaper.unescape
+
+
+def htmlentityreplace_errors(ex):
+    """An encoding error handler.
+
+    This python codecs error handler replaces unencodable
+    characters with HTML entities, or, if no HTML entity exists for
+    the character, XML character references::
+
+        >>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
+        'The cost was &euro;12.'
+    """
+    if isinstance(ex, UnicodeEncodeError):
+        # Handle encoding errors
+        bad_text = ex.object[ex.start : ex.end]
+        text = _html_entities_escaper.escape(bad_text)
+        return (str(text), ex.end)
+    raise ex
+
+
+codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
+
+
+DEFAULT_ESCAPES = {
+    "x": "filters.xml_escape",
+    "h": "filters.html_escape",
+    "u": "filters.url_escape",
+    "trim": "filters.trim",
+    "entity": "filters.html_entities_escape",
+    "unicode": "str",
+    "decode": "decode",
+    "str": "str",
+    "n": "n",
+}
diff --git a/myenv/lib/python3.10/site-packages/mako/lexer.py b/myenv/lib/python3.10/site-packages/mako/lexer.py
new file mode 100644
index 000000000..75182f852
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/lexer.py
@@ -0,0 +1,469 @@
+# mako/lexer.py
+# Copyright 2006-2022 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""provides the Lexer class for parsing template strings into parse trees."""
+
+import codecs
+import re
+
+from mako import exceptions
+from mako import parsetree
+from mako.pygen import adjust_whitespace
+
+_regexp_cache = {}
+
+
+class Lexer:
+    def __init__(
+        self, text, filename=None, input_encoding=None, preprocessor=None
+    ):
+        self.text = text
+        self.filename = filename
+        self.template = parsetree.TemplateNode(self.filename)
+        self.matched_lineno = 1
+        self.matched_charpos = 0
+        self.lineno = 1
+        self.match_position = 0
+        self.tag = []
+        self.control_line = []
+        self.ternary_stack = []
+        self.encoding = input_encoding
+
+        if preprocessor is None:
+            self.preprocessor = []
+        elif not hasattr(preprocessor, "__iter__"):
+            self.preprocessor = [preprocessor]
+        else:
+            self.preprocessor = preprocessor
+
+    @property
+    def exception_kwargs(self):
+        return {
+            "source": self.text,
+            "lineno": self.matched_lineno,
+            "pos": self.matched_charpos,
+            "filename": self.filename,
+        }
+
+    def match(self, regexp, flags=None):
+        """compile the given regexp, cache the reg, and call match_reg()."""
+
+        try:
+            reg = _regexp_cache[(regexp, flags)]
+        except KeyError:
+            reg = re.compile(regexp, flags) if flags else re.compile(regexp)
+            _regexp_cache[(regexp, flags)] = reg
+
+        return self.match_reg(reg)
+
+    def match_reg(self, reg):
+        """match the given regular expression object to the current text
+        position.
+
+        if a match occurs, update the current text and line position.
+ + """ + + mp = self.match_position + + match = reg.match(self.text, self.match_position) + if match: + (start, end) = match.span() + self.match_position = end + 1 if end == start else end + self.matched_lineno = self.lineno + cp = mp - 1 + if cp >= 0 and cp < self.textlength: + cp = self.text[: cp + 1].rfind("\n") + self.matched_charpos = mp - cp + self.lineno += self.text[mp : self.match_position].count("\n") + return match + + def parse_until_text(self, watch_nesting, *text): + startpos = self.match_position + text_re = r"|".join(text) + brace_level = 0 + paren_level = 0 + bracket_level = 0 + while True: + match = self.match(r"#.*\n") + if match: + continue + match = self.match( + r"(\"\"\"|\'\'\'|\"|\')[^\\]*?(\\.[^\\]*?)*\1", re.S + ) + if match: + continue + match = self.match(r"(%s)" % text_re) + if match and not ( + watch_nesting + and (brace_level > 0 or paren_level > 0 or bracket_level > 0) + ): + return ( + self.text[ + startpos : self.match_position - len(match.group(1)) + ], + match.group(1), + ) + elif not match: + match = self.match(r"(.*?)(?=\"|\'|#|%s)" % text_re, re.S) + if match: + brace_level += match.group(1).count("{") + brace_level -= match.group(1).count("}") + paren_level += match.group(1).count("(") + paren_level -= match.group(1).count(")") + bracket_level += match.group(1).count("[") + bracket_level -= match.group(1).count("]") + continue + raise exceptions.SyntaxException( + "Expected: %s" % ",".join(text), **self.exception_kwargs + ) + + def append_node(self, nodecls, *args, **kwargs): + kwargs.setdefault("source", self.text) + kwargs.setdefault("lineno", self.matched_lineno) + kwargs.setdefault("pos", self.matched_charpos) + kwargs["filename"] = self.filename + node = nodecls(*args, **kwargs) + if len(self.tag): + self.tag[-1].nodes.append(node) + else: + self.template.nodes.append(node) + # build a set of child nodes for the control line + # (used for loop variable detection) + # also build a set of child nodes on ternary control lines + # (used for determining if a pass needs to be auto-inserted + if self.control_line: + control_frame = self.control_line[-1] + control_frame.nodes.append(node) + if ( + not ( + isinstance(node, parsetree.ControlLine) + and control_frame.is_ternary(node.keyword) + ) + and self.ternary_stack + and self.ternary_stack[-1] + ): + self.ternary_stack[-1][-1].nodes.append(node) + if isinstance(node, parsetree.Tag): + if len(self.tag): + node.parent = self.tag[-1] + self.tag.append(node) + elif isinstance(node, parsetree.ControlLine): + if node.isend: + self.control_line.pop() + self.ternary_stack.pop() + elif node.is_primary: + self.control_line.append(node) + self.ternary_stack.append([]) + elif self.control_line and self.control_line[-1].is_ternary( + node.keyword + ): + self.ternary_stack[-1].append(node) + elif self.control_line and not self.control_line[-1].is_ternary( + node.keyword + ): + raise exceptions.SyntaxException( + "Keyword '%s' not a legal ternary for keyword '%s'" + % (node.keyword, self.control_line[-1].keyword), + **self.exception_kwargs, + ) + + _coding_re = re.compile(r"#.*coding[:=]\s*([-\w.]+).*\r?\n") + + def decode_raw_stream(self, text, decode_raw, known_encoding, filename): + """given string/unicode or bytes/string, determine encoding + from magic encoding comment, return body as unicode + or raw if decode_raw=False + + """ + if isinstance(text, str): + m = self._coding_re.match(text) + encoding = m and m.group(1) or known_encoding or "utf-8" + return encoding, text + + if 
text.startswith(codecs.BOM_UTF8):
+            text = text[len(codecs.BOM_UTF8) :]
+            parsed_encoding = "utf-8"
+            m = self._coding_re.match(text.decode("utf-8", "ignore"))
+            if m is not None and m.group(1) != "utf-8":
+                raise exceptions.CompileException(
+                    "Found utf-8 BOM in file, with conflicting "
+                    "magic encoding comment of '%s'" % m.group(1),
+                    text.decode("utf-8", "ignore"),
+                    0,
+                    0,
+                    filename,
+                )
+        else:
+            m = self._coding_re.match(text.decode("utf-8", "ignore"))
+            parsed_encoding = m.group(1) if m else known_encoding or "utf-8"
+        if decode_raw:
+            try:
+                text = text.decode(parsed_encoding)
+            except UnicodeDecodeError:
+                raise exceptions.CompileException(
+                    "Unicode decode operation of encoding '%s' failed"
+                    % parsed_encoding,
+                    text.decode("utf-8", "ignore"),
+                    0,
+                    0,
+                    filename,
+                )
+
+        return parsed_encoding, text
+
+    def parse(self):
+        self.encoding, self.text = self.decode_raw_stream(
+            self.text, True, self.encoding, self.filename
+        )
+
+        for preproc in self.preprocessor:
+            self.text = preproc(self.text)
+
+        # push the match marker past the
+        # encoding comment.
+        self.match_reg(self._coding_re)
+
+        self.textlength = len(self.text)
+
+        while True:
+            if self.match_position > self.textlength:
+                break
+
+            if self.match_end():
+                break
+            if self.match_expression():
+                continue
+            if self.match_control_line():
+                continue
+            if self.match_comment():
+                continue
+            if self.match_tag_start():
+                continue
+            if self.match_tag_end():
+                continue
+            if self.match_python_block():
+                continue
+            if self.match_text():
+                continue
+
+            if self.match_position > self.textlength:
+                break
+            # TODO: no coverage here
+            raise exceptions.MakoException("assertion failed")
+
+        if len(self.tag):
+            raise exceptions.SyntaxException(
+                "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                **self.exception_kwargs,
+            )
+        if len(self.control_line):
+            raise exceptions.SyntaxException(
+                "Unterminated control keyword: '%s'"
+                % self.control_line[-1].keyword,
+                self.text,
+                self.control_line[-1].lineno,
+                self.control_line[-1].pos,
+                self.filename,
+            )
+        return self.template
+
+    def match_tag_start(self):
+        reg = r"""
+            \<%     # opening tag
+
+            ([\w\.\:]+)   # keyword
+
+            ((?:\s+\w+|\s*=\s*|"[^"]*?"|'[^']*?'|\s*,\s*)*)  # attrname, = \
+                                               #        sign, string expression
+                                               # comma is for backwards compat
+                                               # identified in #366
+
+            \s*     # more whitespace
+
+            (/)?>   # closing
+
+        """
+
+        match = self.match(
+            reg,
+            re.I | re.S | re.X,
+        )
+
+        if not match:
+            return False
+
+        keyword, attr, isend = match.groups()
+        self.keyword = keyword
+        attributes = {}
+        if attr:
+            for att in re.findall(
+                r"\s*(\w+)\s*=\s*(?:'([^']*)'|\"([^\"]*)\")", attr
+            ):
+                key, val1, val2 = att
+                text = val1 or val2
+                text = text.replace("\r\n", "\n")
+                attributes[key] = text
+        self.append_node(parsetree.Tag, keyword, attributes)
+        if isend:
+            self.tag.pop()
+        elif keyword == "text":
+            match = self.match(r"(.*?)(?=\</%text>)", re.S)
+            if not match:
+                raise exceptions.SyntaxException(
+                    "Unclosed tag: <%%%s>" % self.tag[-1].keyword,
+                    **self.exception_kwargs,
+                )
+            self.append_node(parsetree.Text, match.group(1))
+            return self.match_tag_end()
+        return True
+
+    def match_tag_end(self):
+        match = self.match(r"\</%[\t ]*(.+?)[\t ]*>")
+        if match:
+            if not len(self.tag):
+                raise exceptions.SyntaxException(
+                    "Closing tag without opening tag: </%%%s>"
+                    % match.group(1),
+                    **self.exception_kwargs,
+                )
+            elif self.tag[-1].keyword != match.group(1):
+                raise exceptions.SyntaxException(
+                    "Closing tag </%%%s> does not match tag: <%%%s>"
+                    % (match.group(1), self.tag[-1].keyword),
+                    **self.exception_kwargs,
+                )
+            self.tag.pop()
+            return True
+        else:
+            return False
+    def match_end(self):
+        match = self.match(r"\Z", re.S)
+        if not match:
+            return False
+
+        string = match.group()
+        if string:
+            return string
+        else:
+            return True
+
+    def match_text(self):
+        match = self.match(
+            r"""
+                (.*?)         # anything, followed by:
+                (
+                 (?<=\n)(?=[ \t]*(?=%|\#\#)) # an eval or line-based
+                                             # comment preceded by a
+                                             # consumed newline and whitespace
+                 |
+                 (?=\${)      # an expression
+                 |
+                 (?=</?[%&])  # a substitution or block or call start or end
+                              # - don't consume
+                 |
+                 (\\\r?\n)    # an escaped newline  - throw away
+                 |
+                 \Z           # end of string
+                )""",
+            re.X | re.S,
+        )
+
+        if match:
+            text = match.group(1)
+            if text:
+                self.append_node(parsetree.Text, text)
+            return True
+        else:
+            return False
+
+    def match_python_block(self):
+        match = self.match(r"<%(!)?")
+        if match:
+            line, pos = self.matched_lineno, self.matched_charpos
+            text, end = self.parse_until_text(False, r"%>")
+            # the trailing newline helps
+            # compiler.parse() not complain about indentation
+            text = adjust_whitespace(text) + "\n"
+            self.append_node(
+                parsetree.Code,
+                text,
+                match.group(1) == "!",
+                lineno=line,
+                pos=pos,
+            )
+            return True
+        else:
+            return False
+
+    def match_expression(self):
+        match = self.match(r"\${")
+        if not match:
+            return False
+
+        line, pos = self.matched_lineno, self.matched_charpos
+        text, end = self.parse_until_text(True, r"\|", r"}")
+        if end == "|":
+            escapes, end = self.parse_until_text(True, r"}")
+        else:
+            escapes = ""
+        text = text.replace("\r\n", "\n")
+        self.append_node(
+            parsetree.Expression,
+            text,
+            escapes.strip(),
+            lineno=line,
+            pos=pos,
+        )
+        return True
+
+    def match_control_line(self):
+        match = self.match(
+            r"(?<=^)[\t ]*(%(?!%)|##)[\t ]*((?:(?:\\\r?\n)|[^\r\n])*)"
+            r"(?:\r?\n|\Z)",
+            re.M,
+        )
+        if not match:
+            return False
+
+        operator = match.group(1)
+        text = match.group(2)
+        if operator == "%":
+            m2 = re.match(r"(end)?(\w+)\s*(.*)", text)
+            if not m2:
+                raise exceptions.SyntaxException(
+                    "Invalid control line: '%s'" % text,
+                    **self.exception_kwargs,
+                )
+            isend, keyword = m2.group(1, 2)
+            isend = isend is not None
+
+            if isend:
+                if not len(self.control_line):
+                    raise exceptions.SyntaxException(
+                        "No starting keyword '%s' for '%s'" % (keyword, text),
+                        **self.exception_kwargs,
+                    )
+                elif self.control_line[-1].keyword != keyword:
+                    raise exceptions.SyntaxException(
+                        "Keyword '%s' doesn't match keyword '%s'"
+                        % (text, self.control_line[-1].keyword),
+                        **self.exception_kwargs,
+                    )
+            self.append_node(parsetree.ControlLine, keyword, isend, text)
+        else:
+            self.append_node(parsetree.Comment, text)
+        return True
+
+    def match_comment(self):
+        """matches the multiline version of a comment"""
+        match = self.match(r"<%doc>(.*?)</%doc>", re.S)
+        if match:
+            self.append_node(parsetree.Comment, match.group(1))
+            return True
+        else:
+            return False
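The lexer above is normally driven by ``Template``, but it can be exercised directly. A minimal sketch (the template text and variable names here are illustrative only): ``parse()`` returns a ``parsetree.TemplateNode`` whose children correspond one-to-one to the constructs consumed by the ``match_*`` methods:

.. sourcecode:: python

    from mako.lexer import Lexer

    tree = Lexer("""hello ${name}!
    % if flag:
    yes
    % endif
    """).parse()

    # children are Text, Expression and ControlLine nodes
    for node in tree.get_children():
        print(repr(node))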
diff --git a/myenv/lib/python3.10/site-packages/mako/lookup.py b/myenv/lib/python3.10/site-packages/mako/lookup.py
new file mode 100644
index 000000000..f7410ce3b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/lookup.py
@@ -0,0 +1,362 @@
+# mako/lookup.py
+# Copyright 2006-2022 the Mako authors and contributors
+#
+# This module is part of Mako and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import os
+import posixpath
+import re
+import stat
+import threading
+
+from mako import exceptions
+from mako import util
+from mako.template import Template
+
+
+class TemplateCollection:
+
+    """Represent a collection of :class:`.Template` objects,
+    identifiable via URI.
+
+    A :class:`.TemplateCollection` is linked to the usage of
+    all template tags that address other templates, such
+    as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
+    The ``file`` attribute of each of those tags refers
+    to a string URI that is passed to that :class:`.Template`
+    object's :class:`.TemplateCollection` for resolution.
+
+    :class:`.TemplateCollection` is an abstract class,
+    with the usual default implementation being :class:`.TemplateLookup`.
+
+    """
+
+    def has_template(self, uri):
+        """Return ``True`` if this :class:`.TemplateLookup` is
+        capable of returning a :class:`.Template` object for the
+        given ``uri``.
+
+        :param uri: String URI of the template to be resolved.
+
+        """
+        try:
+            self.get_template(uri)
+            return True
+        except exceptions.TemplateLookupException:
+            return False
+
+    def get_template(self, uri, relativeto=None):
+        """Return a :class:`.Template` object corresponding to the given
+        ``uri``.
+
+        The default implementation raises
+        :class:`.NotImplementedError`. Implementations should
+        raise :class:`.TemplateLookupException` if the given ``uri``
+        cannot be resolved.
+
+        :param uri: String URI of the template to be resolved.
+        :param relativeto: if present, the given ``uri`` is assumed to
+         be relative to this URI.
+
+        """
+        raise NotImplementedError()
+
+    def filename_to_uri(self, uri, filename):
+        """Convert the given ``filename`` to a URI relative to
+        this :class:`.TemplateCollection`."""
+
+        return uri
+
+    def adjust_uri(self, uri, filename):
+        """Adjust the given ``uri`` based on the calling ``filename``.
+
+        When this method is called from the runtime, the
+        ``filename`` parameter is taken directly to the ``filename``
+        attribute of the calling template. Therefore a custom
+        :class:`.TemplateCollection` subclass can place any string
+        identifier desired in the ``filename`` parameter of the
+        :class:`.Template` objects it constructs and have them come back
+        here.
+
+        """
+        return uri
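``TemplateLookup``, defined next, is the standard concrete implementation: it resolves URIs against a list of filesystem directories and against templates registered in memory. A minimal sketch of the in-memory path (the URIs here are arbitrary):

.. sourcecode:: python

    from mako.lookup import TemplateLookup

    lookup = TemplateLookup()
    # the <%inherit> below is resolved through this same lookup
    lookup.put_string("/base.html", "header|${self.body()}|footer")
    lookup.put_string("/index.html", "<%inherit file='/base.html'/>hello")
    print(lookup.get_template("/index.html").render())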
+class TemplateLookup(TemplateCollection):
+
+    """Represent a collection of templates that locates template source files
+    from the local filesystem.
+
+    The primary argument is the ``directories`` argument, the list of
+    directories to search:
+
+    .. sourcecode:: python
+
+        lookup = TemplateLookup(["/path/to/templates"])
+        some_template = lookup.get_template("/index.html")
+
+    The :class:`.TemplateLookup` can also be given :class:`.Template` objects
+    programmatically using :meth:`.put_string` or :meth:`.put_template`:
+
+    .. sourcecode:: python
+
+        lookup = TemplateLookup()
+        lookup.put_string("base.html", '''
+            <html><body>${self.next()}</body></html>
+        ''')
+        lookup.put_string("hello.html", '''
+            <%include file='base.html'/>
+
+            Hello, world !
+        ''')
+
+
+    :param directories: A list of directory names which will be
+     searched for a particular template URI. The URI is appended
+     to each directory and the filesystem checked.
+
+    :param collection_size: Approximate size of the collection used
+     to store templates. If left at its default of ``-1``, the size
+     is unbounded, and a plain Python dictionary is used to
+     relate URI strings to :class:`.Template` instances.
+     Otherwise, a least-recently-used cache object is used which
+     will maintain the size of the collection approximately to
+     the number given.
+
+    :param filesystem_checks: When at its default value of ``True``,
+     each call to :meth:`.TemplateLookup.get_template()` will
+     compare the filesystem last modified time to the time in
+     which an existing :class:`.Template` object was created.
+     This allows the :class:`.TemplateLookup` to regenerate a
+     new :class:`.Template` whenever the original source has
+     been updated. Set this to ``False`` for a very minor
+     performance increase.
+
+    :param modulename_callable: A callable which, when present,
+     is passed the path of the source file as well as the
+     requested URI, and then returns the full path of the
+     generated Python module file. This is used to inject
+     alternate schemes for Python module location. If left at
+     its default of ``None``, the built-in system of generation
+     based on ``module_directory`` plus ``uri`` is used.
+
+    All other keyword parameters available for
+    :class:`.Template` are mirrored here. When new
+    :class:`.Template` objects are created, the keywords
+    established with this :class:`.TemplateLookup` are passed on
+    to each new :class:`.Template`.
+
+    """
+
+    def __init__(
+        self,
+        directories=None,
+        module_directory=None,
+        filesystem_checks=True,
+        collection_size=-1,
+        format_exceptions=False,
+        error_handler=None,
+        output_encoding=None,
+        encoding_errors="strict",
+        cache_args=None,
+        cache_impl="beaker",
+        cache_enabled=True,
+        cache_type=None,
+        cache_dir=None,
+        cache_url=None,
+        modulename_callable=None,
+        module_writer=None,
+        default_filters=None,
+        buffer_filters=(),
+        strict_undefined=False,
+        imports=None,
+        future_imports=None,
+        enable_loop=True,
+        input_encoding=None,
+        preprocessor=None,
+        lexer_cls=None,
+        include_error_handler=None,
+    ):
+
+        self.directories = [
+            posixpath.normpath(d) for d in util.to_list(directories, ())
+        ]
+        self.module_directory = module_directory
+        self.modulename_callable = modulename_callable
+        self.filesystem_checks = filesystem_checks
+        self.collection_size = collection_size
+
+        if cache_args is None:
+            cache_args = {}
+        # transfer deprecated cache_* args
+        if cache_dir:
+            cache_args.setdefault("dir", cache_dir)
+        if cache_url:
+            cache_args.setdefault("url", cache_url)
+        if cache_type:
+            cache_args.setdefault("type", cache_type)
+
+        self.template_args = {
+            "format_exceptions": format_exceptions,
+            "error_handler": error_handler,
+            "include_error_handler": include_error_handler,
+            "output_encoding": output_encoding,
+            "cache_impl": cache_impl,
+            "encoding_errors": encoding_errors,
+            "input_encoding": input_encoding,
+            "module_directory": module_directory,
+            "module_writer": module_writer,
+            "cache_args": cache_args,
+            "cache_enabled": cache_enabled,
+            "default_filters": default_filters,
+            "buffer_filters": buffer_filters,
+            "strict_undefined": strict_undefined,
+            "imports": imports,
+            "future_imports": future_imports,
+            "enable_loop": enable_loop,
+            "preprocessor": preprocessor,
+            "lexer_cls": lexer_cls,
+        }
+
+        if collection_size == -1:
+            self._collection = {}
+            self._uri_cache = {}
+        else:
+            self._collection = util.LRUCache(collection_size)
+            self._uri_cache = util.LRUCache(collection_size)
+        self._mutex = threading.Lock()
+
+    def get_template(self, uri):
+        """Return a :class:`.Template` object corresponding to the given
+        ``uri``.
+
+        .. note:: The ``relativeto`` argument is not supported here at
+           the moment.
+
+        """
+
+        try:
+            if self.filesystem_checks:
+                return self._check(uri, self._collection[uri])
+            else:
+                return self._collection[uri]
+        except KeyError as e:
+            u = re.sub(r"^\/+", "", uri)
+            for dir_ in self.directories:
+                # make sure the path separators are posix - os.altsep is empty
+                # on POSIX and cannot be used.
+ dir_ = dir_.replace(os.path.sep, posixpath.sep) + srcfile = posixpath.normpath(posixpath.join(dir_, u)) + if os.path.isfile(srcfile): + return self._load(srcfile, uri) + else: + raise exceptions.TopLevelLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def adjust_uri(self, uri, relativeto): + """Adjust the given ``uri`` based on the given relative URI.""" + + key = (uri, relativeto) + if key in self._uri_cache: + return self._uri_cache[key] + + if uri[0] == "/": + v = self._uri_cache[key] = uri + elif relativeto is not None: + v = self._uri_cache[key] = posixpath.join( + posixpath.dirname(relativeto), uri + ) + else: + v = self._uri_cache[key] = "/" + uri + return v + + def filename_to_uri(self, filename): + """Convert the given ``filename`` to a URI relative to + this :class:`.TemplateCollection`.""" + + try: + return self._uri_cache[filename] + except KeyError: + value = self._relativeize(filename) + self._uri_cache[filename] = value + return value + + def _relativeize(self, filename): + """Return the portion of a filename that is 'relative' + to the directories in this lookup. + + """ + + filename = posixpath.normpath(filename) + for dir_ in self.directories: + if filename[0 : len(dir_)] == dir_: + return filename[len(dir_) :] + else: + return None + + def _load(self, filename, uri): + self._mutex.acquire() + try: + try: + # try returning from collection one + # more time in case concurrent thread already loaded + return self._collection[uri] + except KeyError: + pass + try: + if self.modulename_callable is not None: + module_filename = self.modulename_callable(filename, uri) + else: + module_filename = None + self._collection[uri] = template = Template( + uri=uri, + filename=posixpath.normpath(filename), + lookup=self, + module_filename=module_filename, + **self.template_args, + ) + return template + except: + # if compilation fails etc, ensure + # template is removed from collection, + # re-raise + self._collection.pop(uri, None) + raise + finally: + self._mutex.release() + + def _check(self, uri, template): + if template.filename is None: + return template + + try: + template_stat = os.stat(template.filename) + if template.module._modified_time >= template_stat[stat.ST_MTIME]: + return template + self._collection.pop(uri, None) + return self._load(template.filename, uri) + except OSError as e: + self._collection.pop(uri, None) + raise exceptions.TemplateLookupException( + "Can't locate template for uri %r" % uri + ) from e + + def put_string(self, uri, text): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given string of + ``text``. + + """ + self._collection[uri] = Template( + text, lookup=self, uri=uri, **self.template_args + ) + + def put_template(self, uri, template): + """Place a new :class:`.Template` object into this + :class:`.TemplateLookup`, based on the given + :class:`.Template` object. 
+ + """ + self._collection[uri] = template diff --git a/myenv/lib/python3.10/site-packages/mako/parsetree.py b/myenv/lib/python3.10/site-packages/mako/parsetree.py new file mode 100644 index 000000000..c5a12d1d5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/parsetree.py @@ -0,0 +1,656 @@ +# mako/parsetree.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""defines the parse tree components for Mako templates.""" + +import re + +from mako import ast +from mako import exceptions +from mako import filters +from mako import util + + +class Node: + + """base class for a Node in the parse tree.""" + + def __init__(self, source, lineno, pos, filename): + self.source = source + self.lineno = lineno + self.pos = pos + self.filename = filename + + @property + def exception_kwargs(self): + return { + "source": self.source, + "lineno": self.lineno, + "pos": self.pos, + "filename": self.filename, + } + + def get_children(self): + return [] + + def accept_visitor(self, visitor): + def traverse(node): + for n in node.get_children(): + n.accept_visitor(visitor) + + method = getattr(visitor, "visit" + self.__class__.__name__, traverse) + method(self) + + +class TemplateNode(Node): + + """a 'container' node that stores the overall collection of nodes.""" + + def __init__(self, filename): + super().__init__("", 0, 0, filename) + self.nodes = [] + self.page_attributes = {} + + def get_children(self): + return self.nodes + + def __repr__(self): + return "TemplateNode(%s, %r)" % ( + util.sorted_dict_repr(self.page_attributes), + self.nodes, + ) + + +class ControlLine(Node): + + """defines a control line, a line-oriented python line or end tag. + + e.g.:: + + % if foo: + (markup) + % endif + + """ + + has_loop_context = False + + def __init__(self, keyword, isend, text, **kwargs): + super().__init__(**kwargs) + self.text = text + self.keyword = keyword + self.isend = isend + self.is_primary = keyword in ["for", "if", "while", "try", "with"] + self.nodes = [] + if self.isend: + self._declared_identifiers = [] + self._undeclared_identifiers = [] + else: + code = ast.PythonFragment(text, **self.exception_kwargs) + self._declared_identifiers = code.declared_identifiers + self._undeclared_identifiers = code.undeclared_identifiers + + def get_children(self): + return self.nodes + + def declared_identifiers(self): + return self._declared_identifiers + + def undeclared_identifiers(self): + return self._undeclared_identifiers + + def is_ternary(self, keyword): + """return true if the given keyword is a ternary keyword + for this ControlLine""" + + cases = { + "if": {"else", "elif"}, + "try": {"except", "finally"}, + "for": {"else"}, + } + + return keyword in cases.get(self.keyword, set()) + + def __repr__(self): + return "ControlLine(%r, %r, %r, %r)" % ( + self.keyword, + self.text, + self.isend, + (self.lineno, self.pos), + ) + + +class Text(Node): + """defines plain text in the template.""" + + def __init__(self, content, **kwargs): + super().__init__(**kwargs) + self.content = content + + def __repr__(self): + return "Text(%r, %r)" % (self.content, (self.lineno, self.pos)) + + +class Code(Node): + """defines a Python code block, either inline or module level. + + e.g.:: + + inline: + <% + x = 12 + %> + + module level: + <%! 
+                import logger
+            %>
+
+    """
+
+    def __init__(self, text, ismodule, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+        self.ismodule = ismodule
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return self.code.declared_identifiers
+
+    def undeclared_identifiers(self):
+        return self.code.undeclared_identifiers
+
+    def __repr__(self):
+        return "Code(%r, %r, %r)" % (
+            self.text,
+            self.ismodule,
+            (self.lineno, self.pos),
+        )
+
+
+class Comment(Node):
+    """defines a comment line.
+
+    # this is a comment
+
+    """
+
+    def __init__(self, text, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+
+    def __repr__(self):
+        return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
+
+
+class Expression(Node):
+    """defines an inline expression.
+
+    ${x+y}
+
+    """
+
+    def __init__(self, text, escapes, **kwargs):
+        super().__init__(**kwargs)
+        self.text = text
+        self.escapes = escapes
+        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
+        self.code = ast.PythonCode(text, **self.exception_kwargs)
+
+    def declared_identifiers(self):
+        return []
+
+    def undeclared_identifiers(self):
+        # TODO: make the "filter" shortcut list configurable at parse/gen time
+        return self.code.undeclared_identifiers.union(
+            self.escapes_code.undeclared_identifiers.difference(
+                filters.DEFAULT_ESCAPES
+            )
+        ).difference(self.code.declared_identifiers)
+
+    def __repr__(self):
+        return "Expression(%r, %r, %r)" % (
+            self.text,
+            self.escapes_code.args,
+            (self.lineno, self.pos),
+        )
+
+
+class _TagMeta(type):
+    """metaclass to allow Tag to produce a subclass according to
+    its keyword"""
+
+    _classmap = {}
+
+    def __init__(cls, clsname, bases, dict_):
+        if getattr(cls, "__keyword__", None) is not None:
+            cls._classmap[cls.__keyword__] = cls
+        super().__init__(clsname, bases, dict_)
+
+    def __call__(cls, keyword, attributes, **kwargs):
+        if ":" in keyword:
+            ns, defname = keyword.split(":")
+            return type.__call__(
+                CallNamespaceTag, ns, defname, attributes, **kwargs
+            )
+
+        try:
+            cls = _TagMeta._classmap[keyword]
+        except KeyError:
+            raise exceptions.CompileException(
+                "No such tag: '%s'" % keyword,
+                source=kwargs["source"],
+                lineno=kwargs["lineno"],
+                pos=kwargs["pos"],
+                filename=kwargs["filename"],
+            )
+        return type.__call__(cls, keyword, attributes, **kwargs)
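``_TagMeta`` is what routes each ``<%...>`` keyword to the matching ``Tag`` subclass defined below. A small sketch of the effect, using the lexer from earlier in this patch (the def name is arbitrary):

.. sourcecode:: python

    from mako.lexer import Lexer

    tree = Lexer("<%def name='greet(x)'>hi ${x}</%def>").parse()
    # the 'def' keyword was dispatched to DefTag by _TagMeta
    print(repr(tree.get_children()[0]))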
+class Tag(Node, metaclass=_TagMeta):
+    """abstract base class for tags.
+
+    e.g.::
+
+        <%sometag/>
+
+        <%someothertag>
+            stuff
+        </%someothertag>
+
+    """
+
+    __keyword__ = None
+
+    def __init__(
+        self,
+        keyword,
+        attributes,
+        expressions,
+        nonexpressions,
+        required,
+        **kwargs,
+    ):
+        r"""construct a new Tag instance.
+
+        this constructor is not called directly, and is only called
+        by subclasses.
+
+        :param keyword: the tag keyword
+
+        :param attributes: raw dictionary of attribute key/value pairs
+
+        :param expressions: a set of identifiers that are legal attributes,
+         which can also contain embedded expressions
+
+        :param nonexpressions: a set of identifiers that are legal
+         attributes, which cannot contain embedded expressions
+
+        :param \**kwargs:
+         other arguments passed to the Node superclass (lineno, pos)
+
+        """
+        super().__init__(**kwargs)
+        self.keyword = keyword
+        self.attributes = attributes
+        self._parse_attributes(expressions, nonexpressions)
+        missing = [r for r in required if r not in self.parsed_attributes]
+        if len(missing):
+            raise exceptions.CompileException(
+                (
+                    "Missing attribute(s): %s"
+                    % ",".join(repr(m) for m in missing)
+                ),
+                **self.exception_kwargs,
+            )
+
+        self.parent = None
+        self.nodes = []
+
+    def is_root(self):
+        return self.parent is None
+
+    def get_children(self):
+        return self.nodes
+
+    def _parse_attributes(self, expressions, nonexpressions):
+        undeclared_identifiers = set()
+        self.parsed_attributes = {}
+        for key in self.attributes:
+            if key in expressions:
+                expr = []
+                for x in re.compile(r"(\${.+?})", re.S).split(
+                    self.attributes[key]
+                ):
+                    m = re.compile(r"^\${(.+?)}$", re.S).match(x)
+                    if m:
+                        code = ast.PythonCode(
+                            m.group(1).rstrip(), **self.exception_kwargs
+                        )
+                        # we aren't discarding "declared_identifiers" here,
+                        # which we do so that list comprehension-declared
+                        # variables aren't counted. As yet can't find a
+                        # condition that requires it here.
+                        undeclared_identifiers = undeclared_identifiers.union(
+                            code.undeclared_identifiers
+                        )
+                        expr.append("(%s)" % m.group(1))
+                    elif x:
+                        expr.append(repr(x))
+                self.parsed_attributes[key] = " + ".join(expr) or repr("")
+            elif key in nonexpressions:
+                if re.search(r"\${.+?}", self.attributes[key]):
+                    raise exceptions.CompileException(
+                        "Attribute '%s' in tag '%s' does not allow embedded "
+                        "expressions" % (key, self.keyword),
+                        **self.exception_kwargs,
+                    )
+                self.parsed_attributes[key] = repr(self.attributes[key])
+            else:
+                raise exceptions.CompileException(
+                    "Invalid attribute for tag '%s': '%s'"
+                    % (self.keyword, key),
+                    **self.exception_kwargs,
+                )
+        self.expression_undeclared_identifiers = undeclared_identifiers
+
+    def declared_identifiers(self):
+        return []
+
+    def undeclared_identifiers(self):
+        return self.expression_undeclared_identifiers
+
+    def __repr__(self):
+        return "%s(%r, %s, %r, %r)" % (
+            self.__class__.__name__,
+            self.keyword,
+            util.sorted_dict_repr(self.attributes),
+            (self.lineno, self.pos),
+            self.nodes,
+        )
+
+
+class IncludeTag(Tag):
+    __keyword__ = "include"
+
+    def __init__(self, keyword, attributes, **kwargs):
+        super().__init__(
+            keyword,
+            attributes,
+            ("file", "import", "args"),
+            (),
+            ("file",),
+            **kwargs,
+        )
+        self.page_args = ast.PythonCode(
+            "__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs
+        )
+
+    def declared_identifiers(self):
+        return []
+
+    def undeclared_identifiers(self):
+        identifiers = self.page_args.undeclared_identifiers.difference(
+            {"__DUMMY"}
+        ).difference(self.page_args.declared_identifiers)
+        return identifiers.union(super().undeclared_identifiers())
+
+
+class NamespaceTag(Tag):
+    __keyword__ = "namespace"
+
+    def __init__(self, keyword, attributes, **kwargs):
+        super().__init__(
+            keyword,
+            attributes,
+            ("file",),
+            ("name", "inheritable", "import", "module"),
+            (),
+            **kwargs,
+        )
+
+        self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self))))
+        if "name" not in attributes and "import"
not in attributes: + raise exceptions.CompileException( + "'name' and/or 'import' attributes are required " + "for <%namespace>", + **self.exception_kwargs, + ) + if "file" in attributes and "module" in attributes: + raise exceptions.CompileException( + "<%namespace> may only have one of 'file' or 'module'", + **self.exception_kwargs, + ) + + def declared_identifiers(self): + return [] + + +class TextTag(Tag): + __keyword__ = "text" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__(keyword, attributes, (), ("filter"), (), **kwargs) + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + def undeclared_identifiers(self): + return self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ).union(self.expression_undeclared_identifiers) + + +class DefTag(Tag): + __keyword__ = "def" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + ("name",), + **kwargs, + ) + name = attributes["name"] + if re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "Missing parenthesis in %def", **self.exception_kwargs + ) + self.function_decl = ast.FunctionDecl( + "def " + name + ":pass", **self.exception_kwargs + ) + self.name = self.function_decl.funcname + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_anonymous = False + is_block = False + + @property + def funcname(self): + return self.function_decl.funcname + + def get_argument_expressions(self, **kw): + return self.function_decl.get_argument_expressions(**kw) + + def declared_identifiers(self): + return self.function_decl.allargnames + + def undeclared_identifiers(self): + res = [] + for c in self.function_decl.defaults: + res += list( + ast.PythonCode( + c, **self.exception_kwargs + ).undeclared_identifiers + ) + return ( + set(res) + .union( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ) + .union(self.expression_undeclared_identifiers) + .difference(self.function_decl.allargnames) + ) + + +class BlockTag(Tag): + __keyword__ = "block" + + def __init__(self, keyword, attributes, **kwargs): + expressions = ["buffered", "cached", "args"] + [ + c for c in attributes if c.startswith("cache_") + ] + + super().__init__( + keyword, + attributes, + expressions, + ("name", "filter", "decorator"), + (), + **kwargs, + ) + name = attributes.get("name") + if name and not re.match(r"^[\w_]+$", name): + raise exceptions.CompileException( + "%block may not specify an argument signature", + **self.exception_kwargs, + ) + if not name and attributes.get("args", None): + raise exceptions.CompileException( + "Only named %blocks may specify args", **self.exception_kwargs + ) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + self.name = name + self.decorator = attributes.get("decorator", "") + self.filter_args = ast.ArgumentList( + attributes.get("filter", ""), **self.exception_kwargs + ) + + is_block = True + + @property + def is_anonymous(self): + return self.name is None + + @property + def funcname(self): + return self.name or "__M_anon_%d" % (self.lineno,) + + def get_argument_expressions(self, **kw): + return self.body_decl.get_argument_expressions(**kw) + + def 
declared_identifiers(self): + return self.body_decl.allargnames + + def undeclared_identifiers(self): + return ( + self.filter_args.undeclared_identifiers.difference( + filters.DEFAULT_ESCAPES.keys() + ) + ).union(self.expression_undeclared_identifiers) + + +class CallTag(Tag): + __keyword__ = "call" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs + ) + self.expression = attributes["expr"] + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class CallNamespaceTag(Tag): + def __init__(self, namespace, defname, attributes, **kwargs): + super().__init__( + namespace + ":" + defname, + attributes, + tuple(attributes.keys()) + ("args",), + (), + (), + **kwargs, + ) + + self.expression = "%s.%s(%s)" % ( + namespace, + defname, + ",".join( + "%s=%s" % (k, v) + for k, v in self.parsed_attributes.items() + if k != "args" + ), + ) + + self.code = ast.PythonCode(self.expression, **self.exception_kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.code.declared_identifiers.union(self.body_decl.allargnames) + + def undeclared_identifiers(self): + return self.code.undeclared_identifiers.difference( + self.code.declared_identifiers + ) + + +class InheritTag(Tag): + __keyword__ = "inherit" + + def __init__(self, keyword, attributes, **kwargs): + super().__init__( + keyword, attributes, ("file",), (), ("file",), **kwargs + ) + + +class PageTag(Tag): + __keyword__ = "page" + + def __init__(self, keyword, attributes, **kwargs): + expressions = [ + "cached", + "args", + "expression_filter", + "enable_loop", + ] + [c for c in attributes if c.startswith("cache_")] + + super().__init__(keyword, attributes, expressions, (), (), **kwargs) + self.body_decl = ast.FunctionArgs( + attributes.get("args", ""), **self.exception_kwargs + ) + self.filter_args = ast.ArgumentList( + attributes.get("expression_filter", ""), **self.exception_kwargs + ) + + def declared_identifiers(self): + return self.body_decl.allargnames diff --git a/myenv/lib/python3.10/site-packages/mako/pygen.py b/myenv/lib/python3.10/site-packages/mako/pygen.py new file mode 100644 index 000000000..57c697931 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/pygen.py @@ -0,0 +1,309 @@ +# mako/pygen.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""utilities for generating and formatting literal Python code.""" + +import re + +from mako import exceptions + + +class PythonPrinter: + def __init__(self, stream): + # indentation counter + self.indent = 0 + + # a stack storing information about why we incremented + # the indentation counter, to help us determine if we + # should decrement it + self.indent_detail = [] + + # the string of whitespace multiplied by the indent + # counter to produce a line + self.indentstring = " " + + # the stream we are writing to + self.stream = stream + + # current line number + self.lineno = 1 + + # a list of lines that represents a 
buffered "block" of code, + # which can be later printed relative to an indent level + self.line_buffer = [] + + self.in_indent_lines = False + + self._reset_multi_line_flags() + + # mapping of generated python lines to template + # source lines + self.source_map = {} + + self._re_space_comment = re.compile(r"^\s*#") + self._re_space = re.compile(r"^\s*$") + self._re_indent = re.compile(r":[ \t]*(?:#.*)?$") + self._re_compound = re.compile(r"^\s*(if|try|elif|while|for|with)") + self._re_indent_keyword = re.compile( + r"^\s*(def|class|else|elif|except|finally)" + ) + self._re_unindentor = re.compile(r"^\s*(else|elif|except|finally).*\:") + + def _update_lineno(self, num): + self.lineno += num + + def start_source(self, lineno): + if self.lineno not in self.source_map: + self.source_map[self.lineno] = lineno + + def write_blanks(self, num): + self.stream.write("\n" * num) + self._update_lineno(num) + + def write_indented_block(self, block, starting_lineno=None): + """print a line or lines of python which already contain indentation. + + The indentation of the total block of lines will be adjusted to that of + the current indent level.""" + self.in_indent_lines = False + for i, l in enumerate(re.split(r"\r?\n", block)): + self.line_buffer.append(l) + if starting_lineno is not None: + self.start_source(starting_lineno + i) + self._update_lineno(1) + + def writelines(self, *lines): + """print a series of lines of python.""" + for line in lines: + self.writeline(line) + + def writeline(self, line): + """print a line of python, indenting it according to the current + indent level. + + this also adjusts the indentation counter according to the + content of the line. + + """ + + if not self.in_indent_lines: + self._flush_adjusted_lines() + self.in_indent_lines = True + + if ( + line is None + or self._re_space_comment.match(line) + or self._re_space.match(line) + ): + hastext = False + else: + hastext = True + + is_comment = line and len(line) and line[0] == "#" + + # see if this line should decrease the indentation level + if ( + not is_comment + and (not hastext or self._is_unindentor(line)) + and self.indent > 0 + ): + self.indent -= 1 + # if the indent_detail stack is empty, the user + # probably put extra closures - the resulting + # module wont compile. + if len(self.indent_detail) == 0: + # TODO: no coverage here + raise exceptions.MakoException("Too many whitespace closures") + self.indent_detail.pop() + + if line is None: + return + + # write the line + self.stream.write(self._indent_line(line) + "\n") + self._update_lineno(len(line.split("\n"))) + + # see if this line should increase the indentation level. + # note that a line can both decrase (before printing) and + # then increase (after printing) the indentation level. + + if self._re_indent.search(line): + # increment indentation count, and also + # keep track of what the keyword was that indented us, + # if it is a python compound statement keyword + # where we might have to look for an "unindent" keyword + match = self._re_compound.match(line) + if match: + # its a "compound" keyword, so we will check for "unindentors" + indentor = match.group(1) + self.indent += 1 + self.indent_detail.append(indentor) + else: + indentor = None + # its not a "compound" keyword. 
but lets also + # test for valid Python keywords that might be indenting us, + # else assume its a non-indenting line + m2 = self._re_indent_keyword.match(line) + if m2: + self.indent += 1 + self.indent_detail.append(indentor) + + def close(self): + """close this printer, flushing any remaining lines.""" + self._flush_adjusted_lines() + + def _is_unindentor(self, line): + """return true if the given line is an 'unindentor', + relative to the last 'indent' event received. + + """ + + # no indentation detail has been pushed on; return False + if len(self.indent_detail) == 0: + return False + + indentor = self.indent_detail[-1] + + # the last indent keyword we grabbed is not a + # compound statement keyword; return False + if indentor is None: + return False + + # if the current line doesnt have one of the "unindentor" keywords, + # return False + match = self._re_unindentor.match(line) + # if True, whitespace matches up, we have a compound indentor, + # and this line has an unindentor, this + # is probably good enough + return bool(match) + + # should we decide that its not good enough, heres + # more stuff to check. + # keyword = match.group(1) + + # match the original indent keyword + # for crit in [ + # (r'if|elif', r'else|elif'), + # (r'try', r'except|finally|else'), + # (r'while|for', r'else'), + # ]: + # if re.match(crit[0], indentor) and re.match(crit[1], keyword): + # return True + + # return False + + def _indent_line(self, line, stripspace=""): + """indent the given line according to the current indent level. + + stripspace is a string of space that will be truncated from the + start of the line before indenting.""" + if stripspace == "": + # Fast path optimization. + return self.indentstring * self.indent + line + + return re.sub( + r"^%s" % stripspace, self.indentstring * self.indent, line + ) + + def _reset_multi_line_flags(self): + """reset the flags which would indicate we are in a backslashed + or triple-quoted section.""" + + self.backslashed, self.triplequoted = False, False + + def _in_multi_line(self, line): + """return true if the given line is part of a multi-line block, + via backslash or triple-quote.""" + + # we are only looking for explicitly joined lines here, not + # implicit ones (i.e. brackets, braces etc.). 
this is just to + # guard against the possibility of modifying the space inside of + # a literal multiline string with unfortunately placed + # whitespace + + current_state = self.backslashed or self.triplequoted + + self.backslashed = bool(re.search(r"\\$", line)) + triples = len(re.findall(r"\"\"\"|\'\'\'", line)) + if triples == 1 or triples % 2 != 0: + self.triplequoted = not self.triplequoted + + return current_state + + def _flush_adjusted_lines(self): + stripspace = None + self._reset_multi_line_flags() + + for entry in self.line_buffer: + if self._in_multi_line(entry): + self.stream.write(entry + "\n") + else: + entry = entry.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry): + stripspace = re.match(r"^([ \t]*)", entry).group(1) + self.stream.write(self._indent_line(entry, stripspace) + "\n") + + self.line_buffer = [] + self._reset_multi_line_flags() + + +def adjust_whitespace(text): + """remove the left-whitespace margin of a block of Python code.""" + + state = [False, False] + (backslashed, triplequoted) = (0, 1) + + def in_multi_line(line): + start_state = state[backslashed] or state[triplequoted] + + if re.search(r"\\$", line): + state[backslashed] = True + else: + state[backslashed] = False + + def match(reg, t): + m = re.match(reg, t) + if m: + return m, t[len(m.group(0)) :] + else: + return None, t + + while line: + if state[triplequoted]: + m, line = match(r"%s" % state[triplequoted], line) + if m: + state[triplequoted] = False + else: + m, line = match(r".*?(?=%s|$)" % state[triplequoted], line) + else: + m, line = match(r"#", line) + if m: + return start_state + + m, line = match(r"\"\"\"|\'\'\'", line) + if m: + state[triplequoted] = m.group(0) + continue + + m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line) + + return start_state + + def _indent_line(line, stripspace=""): + return re.sub(r"^%s" % stripspace, "", line) + + lines = [] + stripspace = None + + for line in re.split(r"\r?\n", text): + if in_multi_line(line): + lines.append(line) + else: + line = line.expandtabs() + if stripspace is None and re.search(r"^[ \t]*[^# \t]", line): + stripspace = re.match(r"^([ \t]*)", line).group(1) + lines.append(_indent_line(line, stripspace)) + return "\n".join(lines) diff --git a/myenv/lib/python3.10/site-packages/mako/pyparser.py b/myenv/lib/python3.10/site-packages/mako/pyparser.py new file mode 100644 index 000000000..d9b090c6f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/pyparser.py @@ -0,0 +1,220 @@ +# mako/pyparser.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Handles parsing of Python code. + +Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler +module is used. 
+""" + +import operator + +import _ast + +from mako import _ast_util +from mako import compat +from mako import exceptions +from mako import util + +# words that cannot be assigned to (notably +# smaller than the total keys in __builtins__) +reserved = {"True", "False", "None", "print"} + +# the "id" attribute on a function node +arg_id = operator.attrgetter("arg") + +util.restore__ast(_ast) + + +def parse(code, mode="exec", **exception_kwargs): + """Parse an expression into AST""" + + try: + return _ast_util.parse(code, "", mode) + except Exception as e: + raise exceptions.SyntaxException( + "(%s) %s (%r)" + % ( + compat.exception_as().__class__.__name__, + compat.exception_as(), + code[0:50], + ), + **exception_kwargs, + ) from e + + +class FindIdentifiers(_ast_util.NodeVisitor): + def __init__(self, listener, **exception_kwargs): + self.in_function = False + self.in_assign_targets = False + self.local_ident_stack = set() + self.listener = listener + self.exception_kwargs = exception_kwargs + + def _add_declared(self, name): + if not self.in_function: + self.listener.declared_identifiers.add(name) + else: + self.local_ident_stack.add(name) + + def visit_ClassDef(self, node): + self._add_declared(node.name) + + def visit_Assign(self, node): + + # flip around the visiting of Assign so the expression gets + # evaluated first, in the case of a clause like "x=x+5" (x + # is undeclared) + + self.visit(node.value) + in_a = self.in_assign_targets + self.in_assign_targets = True + for n in node.targets: + self.visit(n) + self.in_assign_targets = in_a + + def visit_ExceptHandler(self, node): + if node.name is not None: + self._add_declared(node.name) + if node.type is not None: + self.visit(node.type) + for statement in node.body: + self.visit(statement) + + def visit_Lambda(self, node, *args): + self._visit_function(node, True) + + def visit_FunctionDef(self, node): + self._add_declared(node.name) + self._visit_function(node, False) + + def _expand_tuples(self, args): + for arg in args: + if isinstance(arg, _ast.Tuple): + yield from arg.elts + else: + yield arg + + def _visit_function(self, node, islambda): + + # push function state onto stack. dont log any more + # identifiers as "declared" until outside of the function, + # but keep logging identifiers as "undeclared". 
+class FindIdentifiers(_ast_util.NodeVisitor):
+    def __init__(self, listener, **exception_kwargs):
+        self.in_function = False
+        self.in_assign_targets = False
+        self.local_ident_stack = set()
+        self.listener = listener
+        self.exception_kwargs = exception_kwargs
+
+    def _add_declared(self, name):
+        if not self.in_function:
+            self.listener.declared_identifiers.add(name)
+        else:
+            self.local_ident_stack.add(name)
+
+    def visit_ClassDef(self, node):
+        self._add_declared(node.name)
+
+    def visit_Assign(self, node):
+
+        # flip around the visiting of Assign so the expression gets
+        # evaluated first, in the case of a clause like "x=x+5" (x
+        # is undeclared)
+
+        self.visit(node.value)
+        in_a = self.in_assign_targets
+        self.in_assign_targets = True
+        for n in node.targets:
+            self.visit(n)
+        self.in_assign_targets = in_a
+
+    def visit_ExceptHandler(self, node):
+        if node.name is not None:
+            self._add_declared(node.name)
+        if node.type is not None:
+            self.visit(node.type)
+        for statement in node.body:
+            self.visit(statement)
+
+    def visit_Lambda(self, node, *args):
+        self._visit_function(node, True)
+
+    def visit_FunctionDef(self, node):
+        self._add_declared(node.name)
+        self._visit_function(node, False)
+
+    def _expand_tuples(self, args):
+        for arg in args:
+            if isinstance(arg, _ast.Tuple):
+                yield from arg.elts
+            else:
+                yield arg
+
+    def _visit_function(self, node, islambda):
+
+        # push function state onto stack.  don't log any more
+        # identifiers as "declared" until outside of the function,
+        # but keep logging identifiers as "undeclared".  track
+        # argument names in each function header so they aren't
+        # counted as "undeclared"
+
+        inf = self.in_function
+        self.in_function = True
+
+        local_ident_stack = self.local_ident_stack
+        self.local_ident_stack = local_ident_stack.union(
+            [arg_id(arg) for arg in self._expand_tuples(node.args.args)]
+        )
+        if islambda:
+            self.visit(node.body)
+        else:
+            for n in node.body:
+                self.visit(n)
+        self.in_function = inf
+        self.local_ident_stack = local_ident_stack
+
+    def visit_For(self, node):
+
+        # flip around visit
+
+        self.visit(node.iter)
+        self.visit(node.target)
+        for statement in node.body:
+            self.visit(statement)
+        for statement in node.orelse:
+            self.visit(statement)
+
+    def visit_Name(self, node):
+        if isinstance(node.ctx, _ast.Store):
+            # this is equivalent to visit_AssName in
+            # compiler
+            self._add_declared(node.id)
+        elif (
+            node.id not in reserved
+            and node.id not in self.listener.declared_identifiers
+            and node.id not in self.local_ident_stack
+        ):
+            self.listener.undeclared_identifiers.add(node.id)
+
+    def visit_Import(self, node):
+        for name in node.names:
+            if name.asname is not None:
+                self._add_declared(name.asname)
+            else:
+                self._add_declared(name.name.split(".")[0])
+
+    def visit_ImportFrom(self, node):
+        for name in node.names:
+            if name.asname is not None:
+                self._add_declared(name.asname)
+            elif name.name == "*":
+                raise exceptions.CompileException(
+                    "'import *' is not supported, since all identifier "
+                    "names must be explicitly declared. Please use the "
+                    "form 'from <modulename> import <name1>, <name2>, "
+                    "...' instead.",
+                    **self.exception_kwargs,
+                )
+            else:
+                self._add_declared(name.name)
+
+
+class FindTuple(_ast_util.NodeVisitor):
+    def __init__(self, listener, code_factory, **exception_kwargs):
+        self.listener = listener
+        self.exception_kwargs = exception_kwargs
+        self.code_factory = code_factory
+
+    def visit_Tuple(self, node):
+        for n in node.elts:
+            p = self.code_factory(n, **self.exception_kwargs)
+            self.listener.codeargs.append(p)
+            self.listener.args.append(ExpressionGenerator(n).value())
+            ldi = self.listener.declared_identifiers
+            self.listener.declared_identifiers = ldi.union(
+                p.declared_identifiers
+            )
+            lui = self.listener.undeclared_identifiers
+            self.listener.undeclared_identifiers = lui.union(
+                p.undeclared_identifiers
+            )
+
+
+class ParseFunc(_ast_util.NodeVisitor):
+    def __init__(self, listener, **exception_kwargs):
+        self.listener = listener
+        self.exception_kwargs = exception_kwargs
+
+    def visit_FunctionDef(self, node):
+        self.listener.funcname = node.name
+
+        argnames = [arg_id(arg) for arg in node.args.args]
+        if node.args.vararg:
+            argnames.append(node.args.vararg.arg)
+
+        kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
+        if node.args.kwarg:
+            kwargnames.append(node.args.kwarg.arg)
+        self.listener.argnames = argnames
+        self.listener.defaults = node.args.defaults  # ast
+        self.listener.kwargnames = kwargnames
+        self.listener.kwdefaults = node.args.kw_defaults
+        self.listener.varargs = node.args.vararg
+        self.listener.kwargs = node.args.kwarg
+
+
+class ExpressionGenerator:
+    def __init__(self, astnode):
+        self.generator = _ast_util.SourceGenerator(" " * 4)
+        self.generator.visit(astnode)
+
+    def value(self):
+        return "".join(self.generator.result)
diff --git a/myenv/lib/python3.10/site-packages/mako/runtime.py b/myenv/lib/python3.10/site-packages/mako/runtime.py
new file mode 100644
index 000000000..6d7fa684a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/mako/runtime.py
@@ -0,0 +1,968 @@
+# mako/runtime.py
+# Copyright 2006-2020 the
Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""provides runtime services for templates, including Context, +Namespace, and various helper functions.""" + +import builtins +import functools +import sys + +from mako import compat +from mako import exceptions +from mako import util + + +class Context: + + """Provides runtime namespace, output buffer, and various + callstacks for templates. + + See :ref:`runtime_toplevel` for detail on the usage of + :class:`.Context`. + + """ + + def __init__(self, buffer, **data): + self._buffer_stack = [buffer] + + self._data = data + + self._kwargs = data.copy() + self._with_template = None + self._outputting_as_unicode = None + self.namespaces = {} + + # "capture" function which proxies to the + # generic "capture" function + self._data["capture"] = functools.partial(capture, self) + + # "caller" stack used by def calls with content + self.caller_stack = self._data["caller"] = CallerStack() + + def _set_with_template(self, t): + self._with_template = t + illegal_names = t.reserved_names.intersection(self._data) + if illegal_names: + raise exceptions.NameConflictError( + "Reserved words passed to render(): %s" + % ", ".join(illegal_names) + ) + + @property + def lookup(self): + """Return the :class:`.TemplateLookup` associated + with this :class:`.Context`. + + """ + return self._with_template.lookup + + @property + def kwargs(self): + """Return the dictionary of top level keyword arguments associated + with this :class:`.Context`. + + This dictionary only includes the top-level arguments passed to + :meth:`.Template.render`. It does not include names produced within + the template execution such as local variable names or special names + such as ``self``, ``next``, etc. + + The purpose of this dictionary is primarily for the case that + a :class:`.Template` accepts arguments via its ``<%page>`` tag, + which are normally expected to be passed via :meth:`.Template.render`, + except the template is being called in an inheritance context, + using the ``body()`` method. :attr:`.Context.kwargs` can then be + used to propagate these arguments to the inheriting template:: + + ${next.body(**context.kwargs)} + + """ + return self._kwargs.copy() + + def push_caller(self, caller): + """Push a ``caller`` callable onto the callstack for + this :class:`.Context`.""" + + self.caller_stack.append(caller) + + def pop_caller(self): + """Pop a ``caller`` callable onto the callstack for this + :class:`.Context`.""" + + del self.caller_stack[-1] + + def keys(self): + """Return a list of all names established in this :class:`.Context`.""" + + return list(self._data.keys()) + + def __getitem__(self, key): + if key in self._data: + return self._data[key] + else: + return builtins.__dict__[key] + + def _push_writer(self): + """push a capturing buffer onto this Context and return + the new writer function.""" + + buf = util.FastEncodingBuffer() + self._buffer_stack.append(buf) + return buf.write + + def _pop_buffer_and_writer(self): + """pop the most recent capturing buffer from this Context + and return the current writer after the pop. 
+
+        """
+
+        buf = self._buffer_stack.pop()
+        return buf, self._buffer_stack[-1].write
+
+    def _push_buffer(self):
+        """push a capturing buffer onto this Context."""
+
+        self._push_writer()
+
+    def _pop_buffer(self):
+        """pop the most recent capturing buffer from this Context."""
+
+        return self._buffer_stack.pop()
+
+    def get(self, key, default=None):
+        """Return a value from this :class:`.Context`."""
+
+        return self._data.get(key, builtins.__dict__.get(key, default))
+
+    def write(self, string):
+        """Write a string to this :class:`.Context` object's
+        underlying output buffer."""
+
+        self._buffer_stack[-1].write(string)
+
+    def writer(self):
+        """Return the current writer function."""
+
+        return self._buffer_stack[-1].write
+
+    def _copy(self):
+        c = Context.__new__(Context)
+        c._buffer_stack = self._buffer_stack
+        c._data = self._data.copy()
+        c._kwargs = self._kwargs
+        c._with_template = self._with_template
+        c._outputting_as_unicode = self._outputting_as_unicode
+        c.namespaces = self.namespaces
+        c.caller_stack = self.caller_stack
+        return c
+
+    def _locals(self, d):
+        """Create a new :class:`.Context` with a copy of this
+        :class:`.Context`'s current state,
+        updated with the given dictionary.
+
+        The :attr:`.Context.kwargs` collection remains
+        unaffected.
+
+
+        """
+
+        if not d:
+            return self
+        c = self._copy()
+        c._data.update(d)
+        return c
+
+    def _clean_inheritance_tokens(self):
+        """create a new copy of this :class:`.Context`, with
+        tokens related to inheritance state removed."""
+
+        c = self._copy()
+        x = c._data
+        x.pop("self", None)
+        x.pop("parent", None)
+        x.pop("next", None)
+        return c
+
+
+class CallerStack(list):
+    def __init__(self):
+        self.nextcaller = None
+
+    def __nonzero__(self):
+        return self.__bool__()
+
+    def __bool__(self):
+        return len(self) and self._get_caller() and True or False
+
+    def _get_caller(self):
+        # this method can be removed once
+        # codegen MAGIC_NUMBER moves past 7
+        return self[-1]
+
+    def __getattr__(self, key):
+        return getattr(self._get_caller(), key)
+
+    def _push_frame(self):
+        frame = self.nextcaller or None
+        self.append(frame)
+        self.nextcaller = None
+        return frame
+
+    def _pop_frame(self):
+        self.nextcaller = self.pop()
+
+
+class Undefined:
+
+    """Represents an undefined value in a template.
+
+    All template modules have a constant value
+    ``UNDEFINED`` present which is an instance of this
+    object.
+
+    """
+
+    def __str__(self):
+        raise NameError("Undefined")
+
+    def __nonzero__(self):
+        return self.__bool__()
+
+    def __bool__(self):
+        return False
+
+
+UNDEFINED = Undefined()
+STOP_RENDERING = ""
+
+
+class LoopStack:
+
+    """a stack for LoopContexts that implements the context manager protocol
+    to automatically pop off the top of the stack on context exit
+    """
+
+    def __init__(self):
+        self.stack = []
+
+    def _enter(self, iterable):
+        self._push(iterable)
+        return self._top
+
+    def _exit(self):
+        self._pop()
+        return self._top
+
+    @property
+    def _top(self):
+        if self.stack:
+            return self.stack[-1]
+        else:
+            return self
+
+    def _pop(self):
+        return self.stack.pop()
+
+    def _push(self, iterable):
+        new = LoopContext(iterable)
+        if self.stack:
+            new.parent = self.stack[-1]
+        return self.stack.append(new)
+
+    def __getattr__(self, key):
+        raise exceptions.RuntimeException("No loop context is established")
+
+    def __iter__(self):
+        return iter(self._top)
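``LoopContext``, defined next, is what templates see as the magic ``loop`` name inside a ``% for`` block. A minimal sketch (names and values here are arbitrary):

.. sourcecode:: python

    from mako.template import Template

    t = Template("""\
    % for name in ('a', 'b', 'c'):
    ${loop.index} ${name} ${loop.cycle('odd', 'even')}
    % endfor
    """)
    print(t.render())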
+class LoopContext:
+
+    """A magic loop variable.
+    Automatically accessible in any ``% for`` block.
+
+    See the section :ref:`loop_context` for usage
+    notes.
+
+    :attr:`parent` -> :class:`.LoopContext` or ``None``
+        The parent loop, if one exists.
+    :attr:`index` -> `int`
+        The 0-based iteration count.
+    :attr:`reverse_index` -> `int`
+        The number of iterations remaining.
+    :attr:`first` -> `bool`
+        ``True`` on the first iteration, ``False`` otherwise.
+    :attr:`last` -> `bool`
+        ``True`` on the last iteration, ``False`` otherwise.
+    :attr:`even` -> `bool`
+        ``True`` when ``index`` is even.
+    :attr:`odd` -> `bool`
+        ``True`` when ``index`` is odd.
+    """
+
+    def __init__(self, iterable):
+        self._iterable = iterable
+        self.index = 0
+        self.parent = None
+
+    def __iter__(self):
+        for i in self._iterable:
+            yield i
+            self.index += 1
+
+    @util.memoized_instancemethod
+    def __len__(self):
+        return len(self._iterable)
+
+    @property
+    def reverse_index(self):
+        return len(self) - self.index - 1
+
+    @property
+    def first(self):
+        return self.index == 0
+
+    @property
+    def last(self):
+        return self.index == len(self) - 1
+
+    @property
+    def even(self):
+        return not self.odd
+
+    @property
+    def odd(self):
+        return bool(self.index % 2)
+
+    def cycle(self, *values):
+        """Cycle through values as the loop progresses."""
+        if not values:
+            raise ValueError("You must provide values to cycle through")
+        return values[self.index % len(values)]
+
+
+class _NSAttr:
+    def __init__(self, parent):
+        self.__parent = parent
+
+    def __getattr__(self, key):
+        ns = self.__parent
+        while ns:
+            if hasattr(ns.module, key):
+                return getattr(ns.module, key)
+            else:
+                ns = ns.inherits
+        raise AttributeError(key)
+
+
+class Namespace:
+
+    """Provides access to collections of rendering methods, which
+    can be local, from other templates, or from imported modules.
+
+    To access a particular rendering method referenced by a
+    :class:`.Namespace`, use plain attribute access:
+
+    .. sourcecode:: mako
+
+        ${some_namespace.foo(x, y, z)}
+
+    :class:`.Namespace` also contains several built-in attributes
+    described here.
+
+    """
+
+    def __init__(
+        self,
+        name,
+        context,
+        callables=None,
+        inherits=None,
+        populate_self=True,
+        calling_uri=None,
+    ):
+        self.name = name
+        self.context = context
+        self.inherits = inherits
+        if callables is not None:
+            self.callables = {c.__name__: c for c in callables}
+
+    callables = ()
+
+    module = None
+    """The Python module referenced by this :class:`.Namespace`.
+
+    If the namespace references a :class:`.Template`, then
+    this module is the equivalent of ``template.module``,
+    i.e. the generated module for the template.
+
+    """
+
+    template = None
+    """The :class:`.Template` object referenced by this
+    :class:`.Namespace`, if any.
+
+    """
+
+    context = None
+    """The :class:`.Context` object for this :class:`.Namespace`.
+
+    Namespaces are often created with copies of contexts that
+    contain slightly different data, particularly in inheritance
+    scenarios. Using the :class:`.Context` off of a :class:`.Namespace` one
+    can traverse an entire chain of templates that inherit from
+    one another.
+
+    """
+
+    filename = None
+    """The path of the filesystem file used for this
+    :class:`.Namespace`'s module or template.
+
+    If this is a pure module-based
+    :class:`.Namespace`, this evaluates to ``module.__file__``. If a
+    template-based namespace, it evaluates to the original
+    template file location.
+
+    """
+
+    uri = None
+    """The URI for this :class:`.Namespace`'s template.
+
+    I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`.
+
+    This is the equivalent of :attr:`.Template.uri`.
+ + """ + + _templateuri = None + + @util.memoized_property + def attr(self): + """Access module level attributes by name. + + This accessor allows templates to supply "scalar" + attributes which are particularly handy in inheritance + relationships. + + .. seealso:: + + :ref:`inheritance_attr` + + :ref:`namespace_attr_for_includes` + + """ + return _NSAttr(self) + + def get_namespace(self, uri): + """Return a :class:`.Namespace` corresponding to the given ``uri``. + + If the given ``uri`` is a relative URI (i.e. it does not + contain a leading slash ``/``), the ``uri`` is adjusted to + be relative to the ``uri`` of the namespace itself. This + method is therefore mostly useful off of the built-in + ``local`` namespace, described in :ref:`namespace_local`. + + In + most cases, a template wouldn't need this function, and + should instead use the ``<%namespace>`` tag to load + namespaces. However, since all ``<%namespace>`` tags are + evaluated before the body of a template ever runs, + this method can be used to locate namespaces using + expressions that were generated within the body code of + the template, or to conditionally use a particular + namespace. + + """ + key = (self, uri) + if key in self.context.namespaces: + return self.context.namespaces[key] + ns = TemplateNamespace( + uri, + self.context._copy(), + templateuri=uri, + calling_uri=self._templateuri, + ) + self.context.namespaces[key] = ns + return ns + + def get_template(self, uri): + """Return a :class:`.Template` from the given ``uri``. + + The ``uri`` resolution is relative to the ``uri`` of this + :class:`.Namespace` object's :class:`.Template`. + + """ + return _lookup_template(self.context, uri, self._templateuri) + + def get_cached(self, key, **kwargs): + """Return a value from the :class:`.Cache` referenced by this + :class:`.Namespace` object's :class:`.Template`. + + The advantage to this method versus direct access to the + :class:`.Cache` is that the configuration parameters + declared in ``<%page>`` take effect here, thereby calling + up the same configured backend as that configured + by ``<%page>``. + + """ + + return self.cache.get(key, **kwargs) + + @property + def cache(self): + """Return the :class:`.Cache` object referenced + by this :class:`.Namespace` object's + :class:`.Template`. 
+ + """ + return self.template.cache + + def include_file(self, uri, **kwargs): + """Include a file at the given ``uri``.""" + + _include_file(self.context, uri, self._templateuri, **kwargs) + + def _populate(self, d, l): + for ident in l: + if ident == "*": + for (k, v) in self._get_star(): + d[k] = v + else: + d[ident] = getattr(self, ident) + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class TemplateNamespace(Namespace): + + """A :class:`.Namespace` specific to a :class:`.Template` instance.""" + + def __init__( + self, + name, + context, + template=None, + templateuri=None, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + if templateuri is not None: + self.template = _lookup_template(context, templateuri, calling_uri) + self._templateuri = self.template.module._template_uri + elif template is not None: + self.template = template + self._templateuri = template.module._template_uri + else: + raise TypeError("'template' argument is required.") + + if populate_self: + lclcallable, lclcontext = _populate_self_namespace( + context, self.template, self_ns=self + ) + + @property + def module(self): + """The Python module referenced by this :class:`.Namespace`. + + If the namespace references a :class:`.Template`, then + this module is the equivalent of ``template.module``, + i.e. the generated module for the template. + + """ + return self.template.module + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.template.filename + + @property + def uri(self): + """The URI for this :class:`.Namespace`'s template. + + I.e. whatever was sent to :meth:`.TemplateLookup.get_template()`. + + This is the equivalent of :attr:`.Template.uri`. 
+ + """ + return self.template.uri + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + + def get(key): + callable_ = self.template._get_def_callable(key) + return functools.partial(callable_, self.context) + + for k in self.template.module._exports: + yield (k, get(k)) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif self.template.has_def(key): + callable_ = self.template._get_def_callable(key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +class ModuleNamespace(Namespace): + + """A :class:`.Namespace` specific to a Python module instance.""" + + def __init__( + self, + name, + context, + module, + callables=None, + inherits=None, + populate_self=True, + calling_uri=None, + ): + self.name = name + self.context = context + self.inherits = inherits + if callables is not None: + self.callables = {c.__name__: c for c in callables} + + mod = __import__(module) + for token in module.split(".")[1:]: + mod = getattr(mod, token) + self.module = mod + + @property + def filename(self): + """The path of the filesystem file used for this + :class:`.Namespace`'s module or template. + """ + return self.module.__file__ + + def _get_star(self): + if self.callables: + for key in self.callables: + yield (key, self.callables[key]) + for key in dir(self.module): + if key[0] != "_": + callable_ = getattr(self.module, key) + if callable(callable_): + yield key, functools.partial(callable_, self.context) + + def __getattr__(self, key): + if key in self.callables: + val = self.callables[key] + elif hasattr(self.module, key): + callable_ = getattr(self.module, key) + val = functools.partial(callable_, self.context) + elif self.inherits: + val = getattr(self.inherits, key) + else: + raise AttributeError( + "Namespace '%s' has no member '%s'" % (self.name, key) + ) + setattr(self, key, val) + return val + + +def supports_caller(func): + """Apply a caller_stack compatibility decorator to a plain + Python function. + + See the example in :ref:`namespaces_python_modules`. + + """ + + def wrap_stackframe(context, *args, **kwargs): + context.caller_stack._push_frame() + try: + return func(context, *args, **kwargs) + finally: + context.caller_stack._pop_frame() + + return wrap_stackframe + + +def capture(context, callable_, *args, **kwargs): + """Execute the given template def, capturing the output into + a buffer. + + See the example in :ref:`namespaces_python_modules`. + + """ + + if not callable(callable_): + raise exceptions.RuntimeException( + "capture() function expects a callable as " + "its argument (i.e. 
capture(func, *args, **kwargs))" + ) + context._push_buffer() + try: + callable_(*args, **kwargs) + finally: + buf = context._pop_buffer() + return buf.getvalue() + + +def _decorate_toplevel(fn): + def decorate_render(render_fn): + def go(context, *args, **kw): + def y(*args, **kw): + return render_fn(context, *args, **kw) + + try: + y.__name__ = render_fn.__name__[7:] + except TypeError: + # < Python 2.4 + pass + return fn(y)(context, *args, **kw) + + return go + + return decorate_render + + +def _decorate_inline(context, fn): + def decorate_render(render_fn): + dec = fn(render_fn) + + def go(*args, **kw): + return dec(context, *args, **kw) + + return go + + return decorate_render + + +def _include_file(context, uri, calling_uri, **kwargs): + """locate the template from the given uri and include it in + the current output.""" + + template = _lookup_template(context, uri, calling_uri) + (callable_, ctx) = _populate_self_namespace( + context._clean_inheritance_tokens(), template + ) + kwargs = _kwargs_for_include(callable_, context._data, **kwargs) + if template.include_error_handler: + try: + callable_(ctx, **kwargs) + except Exception: + result = template.include_error_handler(ctx, compat.exception_as()) + if not result: + raise + else: + callable_(ctx, **kwargs) + + +def _inherit_from(context, uri, calling_uri): + """called by the _inherit method in template modules to set + up the inheritance chain at the start of a template's + execution.""" + + if uri is None: + return None + template = _lookup_template(context, uri, calling_uri) + self_ns = context["self"] + ih = self_ns + while ih.inherits is not None: + ih = ih.inherits + lclcontext = context._locals({"next": ih}) + ih.inherits = TemplateNamespace( + "self:%s" % template.uri, + lclcontext, + template=template, + populate_self=False, + ) + context._data["parent"] = lclcontext._data["local"] = ih.inherits + callable_ = getattr(template.module, "_mako_inherit", None) + if callable_ is not None: + ret = callable_(template, lclcontext) + if ret: + return ret + + gen_ns = getattr(template.module, "_mako_generate_namespaces", None) + if gen_ns is not None: + gen_ns(context) + return (template.callable_, lclcontext) + + +def _lookup_template(context, uri, relativeto): + lookup = context._with_template.lookup + if lookup is None: + raise exceptions.TemplateLookupException( + "Template '%s' has no TemplateLookup associated" + % context._with_template.uri + ) + uri = lookup.adjust_uri(uri, relativeto) + try: + return lookup.get_template(uri) + except exceptions.TopLevelLookupException as e: + raise exceptions.TemplateLookupException( + str(compat.exception_as()) + ) from e + + +def _populate_self_namespace(context, template, self_ns=None): + if self_ns is None: + self_ns = TemplateNamespace( + "self:%s" % template.uri, + context, + template=template, + populate_self=False, + ) + context._data["self"] = context._data["local"] = self_ns + if hasattr(template.module, "_mako_inherit"): + ret = template.module._mako_inherit(template, context) + if ret: + return ret + return (template.callable_, context) + + +def _render(template, callable_, args, data, as_unicode=False): + """create a Context and return the string + output of the given template and template callable.""" + + if as_unicode: + buf = util.FastEncodingBuffer() + else: + buf = util.FastEncodingBuffer( + encoding=template.output_encoding, errors=template.encoding_errors + ) + context = Context(buf, **data) + context._outputting_as_unicode = as_unicode + 
context._set_with_template(template) + + _render_context( + template, + callable_, + context, + *args, + **_kwargs_for_callable(callable_, data), + ) + return context._pop_buffer().getvalue() + + +def _kwargs_for_callable(callable_, data): + argspec = compat.inspect_getargspec(callable_) + # for normal pages, **pageargs is usually present + if argspec[2]: + return data + + # for rendering defs from the top level, figure out the args + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + kwargs = {} + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _kwargs_for_include(callable_, data, **kwargs): + argspec = compat.inspect_getargspec(callable_) + namedargs = argspec[0] + [v for v in argspec[1:3] if v is not None] + for arg in namedargs: + if arg != "context" and arg in data and arg not in kwargs: + kwargs[arg] = data[arg] + return kwargs + + +def _render_context(tmpl, callable_, context, *args, **kwargs): + import mako.template as template + + # create polymorphic 'self' namespace for this + # template with possibly updated context + if not isinstance(tmpl, template.DefTemplate): + # if main render method, call from the base of the inheritance stack + (inherit, lclcontext) = _populate_self_namespace(context, tmpl) + _exec_template(inherit, lclcontext, args=args, kwargs=kwargs) + else: + # otherwise, call the actual rendering method specified + (inherit, lclcontext) = _populate_self_namespace(context, tmpl.parent) + _exec_template(callable_, context, args=args, kwargs=kwargs) + + +def _exec_template(callable_, context, args=None, kwargs=None): + """execute a rendering callable given the callable, a + Context, and optional explicit arguments + + the contextual Template will be located if it exists, and + the error handling options specified on that Template will + be interpreted here. 
+ """ + template = context._with_template + if template is not None and ( + template.format_exceptions or template.error_handler + ): + try: + callable_(context, *args, **kwargs) + except Exception: + _render_error(template, context, compat.exception_as()) + except: + e = sys.exc_info()[0] + _render_error(template, context, e) + else: + callable_(context, *args, **kwargs) + + +def _render_error(template, context, error): + if template.error_handler: + result = template.error_handler(context, error) + if not result: + tp, value, tb = sys.exc_info() + if value and tb: + raise value.with_traceback(tb) + else: + raise error + else: + error_template = exceptions.html_error_template() + if context._outputting_as_unicode: + context._buffer_stack[:] = [util.FastEncodingBuffer()] + else: + context._buffer_stack[:] = [ + util.FastEncodingBuffer( + error_template.output_encoding, + error_template.encoding_errors, + ) + ] + + context._set_with_template(error_template) + error_template.render_context(context, error=error) diff --git a/myenv/lib/python3.10/site-packages/mako/template.py b/myenv/lib/python3.10/site-packages/mako/template.py new file mode 100644 index 000000000..8c70731dc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/template.py @@ -0,0 +1,716 @@ +# mako/template.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php + +"""Provides the Template class, a facade for parsing, generating and executing +template strings, as well as template runtime operations.""" + +import json +import os +import re +import shutil +import stat +import tempfile +import types +import weakref + +from mako import cache +from mako import codegen +from mako import compat +from mako import exceptions +from mako import runtime +from mako import util +from mako.lexer import Lexer + + +class Template: + + r"""Represents a compiled template. + + :class:`.Template` includes a reference to the original + template source (via the :attr:`.source` attribute) + as well as the source code of the + generated Python module (i.e. the :attr:`.code` attribute), + as well as a reference to an actual Python module. + + :class:`.Template` is constructed using either a literal string + representing the template text, or a filename representing a filesystem + path to a source file. + + :param text: textual template source. This argument is mutually + exclusive versus the ``filename`` parameter. + + :param filename: filename of the source template. This argument is + mutually exclusive versus the ``text`` parameter. + + :param buffer_filters: string list of filters to be applied + to the output of ``%def``\ s which are buffered, cached, or otherwise + filtered, after all filters + defined with the ``%def`` itself have been applied. Allows the + creation of default expression filters that let the output + of return-valued ``%def``\ s "opt out" of that filtering via + passing special attributes or objects. + + :param cache_args: Dictionary of cache configuration arguments that + will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`. + + :param cache_dir: + + .. deprecated:: 0.6 + Use the ``'dir'`` argument in the ``cache_args`` dictionary. + See :ref:`caching_toplevel`. + + :param cache_enabled: Boolean flag which enables caching of this + template. See :ref:`caching_toplevel`. + + :param cache_impl: String name of a :class:`.CacheImpl` caching + implementation to use. 
Defaults to ``'beaker'``.
+
+    :param cache_type:
+
+        .. deprecated:: 0.6
+            Use the ``'type'`` argument in the ``cache_args`` dictionary.
+            See :ref:`caching_toplevel`.
+
+    :param cache_url:
+
+        .. deprecated:: 0.6
+            Use the ``'url'`` argument in the ``cache_args`` dictionary.
+            See :ref:`caching_toplevel`.
+
+    :param default_filters: List of string filter names that will
+     be applied to all expressions. See :ref:`filtering_default_filters`.
+
+    :param enable_loop: When ``True``, enable the ``loop`` context variable.
+     This can be set to ``False`` to support templates that may
+     be making usage of the name "``loop``". Individual templates can
+     re-enable the "loop" context by placing the directive
+     ``enable_loop="True"`` inside the ``<%page>`` tag -- see
+     :ref:`migrating_loop`.
+
+    :param encoding_errors: Error parameter passed to ``encode()`` when
+     string encoding is performed. See :ref:`usage_unicode`.
+
+    :param error_handler: Python callable which is called whenever
+     compile or runtime exceptions occur. The callable is passed
+     the current context as well as the exception. If the
+     callable returns ``True``, the exception is considered to
+     be handled, else it is re-raised after the function
+     completes. Is used to provide custom error-rendering
+     functions.
+
+     .. seealso::
+
+        :paramref:`.Template.include_error_handler` - include-specific
+        error handler function
+
+    :param format_exceptions: if ``True``, exceptions which occur during
+     the render phase of this template will be caught and
+     formatted into an HTML error page, which then becomes the
+     rendered result of the :meth:`.render` call. Otherwise,
+     runtime exceptions are propagated outwards.
+
+    :param imports: String list of Python statements, typically individual
+     "import" lines, which will be placed into the module level
+     preamble of all generated Python modules. See the example
+     in :ref:`filtering_default_filters`.
+
+    :param future_imports: String list of names to import from `__future__`.
+     These will be concatenated into a comma-separated string and inserted
+     into the beginning of the template, e.g. ``future_imports=['FOO',
+     'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
+     interested in using features like the new division operator, you must
+     use future_imports to convey that to the renderer, as otherwise the
+     import will not appear as the first executed statement in the generated
+     code and will therefore not have the desired effect.
+
+    :param include_error_handler: An error handler that runs when this template
+     is included within another one via the ``<%include>`` tag, and raises an
+     error. Compare to the :paramref:`.Template.error_handler` option.
+
+     .. versionadded:: 1.0.6
+
+     .. seealso::
+
+        :paramref:`.Template.error_handler` - top-level error handler function
+
+    :param input_encoding: Encoding of the template's source code. Can
+     be used in lieu of the coding comment. See
+     :ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
+     details on source encoding.
+
+    :param lookup: a :class:`.TemplateLookup` instance that will be used
+     for all file lookups via the ``<%namespace>``,
+     ``<%include>``, and ``<%inherit>`` tags. See
+     :ref:`usage_templatelookup`.
+
+    :param module_directory: Filesystem location where generated
+     Python module files will be placed.
+
+    :param module_filename: Overrides the filename of the generated
+     Python module file. For advanced usage only.
+
+    :param module_writer: A callable which overrides how the Python
+     module is written entirely. 
The callable is passed the + encoded source content of the module and the destination + path to be written to. The default behavior of module writing + uses a tempfile in conjunction with a file move in order + to make the operation atomic. So a user-defined module + writing function that mimics the default behavior would be: + + .. sourcecode:: python + + import tempfile + import os + import shutil + + def module_writer(source, outputpath): + (dest, name) = \\ + tempfile.mkstemp( + dir=os.path.dirname(outputpath) + ) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + from mako.template import Template + mytemplate = Template( + filename="index.html", + module_directory="/path/to/modules", + module_writer=module_writer + ) + + The function is provided for unusual configurations where + certain platform-specific permissions or other special + steps are needed. + + :param output_encoding: The encoding to use when :meth:`.render` + is called. + See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`. + + :param preprocessor: Python callable which will be passed + the full template source before it is parsed. The return + result of the callable will be used as the template source + code. + + :param lexer_cls: A :class:`.Lexer` class used to parse + the template. The :class:`.Lexer` class is used by + default. + + .. versionadded:: 0.7.4 + + :param strict_undefined: Replaces the automatic usage of + ``UNDEFINED`` for any undeclared variables not located in + the :class:`.Context` with an immediate raise of + ``NameError``. The advantage is immediate reporting of + missing variables which include the name. + + .. versionadded:: 0.3.6 + + :param uri: string URI or other identifier for this template. + If not provided, the ``uri`` is generated from the filesystem + path, or from the in-memory identity of a non-file-based + template. The primary usage of the ``uri`` is to provide a key + within :class:`.TemplateLookup`, as well as to generate the + file path of the generated Python module file, if + ``module_directory`` is specified. + + """ + + lexer_cls = Lexer + + def __init__( + self, + text=None, + filename=None, + uri=None, + format_exceptions=False, + error_handler=None, + lookup=None, + output_encoding=None, + encoding_errors="strict", + module_directory=None, + cache_args=None, + cache_impl="beaker", + cache_enabled=True, + cache_type=None, + cache_dir=None, + cache_url=None, + module_filename=None, + input_encoding=None, + module_writer=None, + default_filters=None, + buffer_filters=(), + strict_undefined=False, + imports=None, + future_imports=None, + enable_loop=True, + preprocessor=None, + lexer_cls=None, + include_error_handler=None, + ): + if uri: + self.module_id = re.sub(r"\W", "_", uri) + self.uri = uri + elif filename: + self.module_id = re.sub(r"\W", "_", filename) + drive, path = os.path.splitdrive(filename) + path = os.path.normpath(path).replace(os.path.sep, "/") + self.uri = path + else: + self.module_id = "memory:" + hex(id(self)) + self.uri = self.module_id + + u_norm = self.uri + if u_norm.startswith("/"): + u_norm = u_norm[1:] + u_norm = os.path.normpath(u_norm) + if u_norm.startswith(".."): + raise exceptions.TemplateLookupException( + 'Template uri "%s" is invalid - ' + "it cannot be relative outside " + "of the root path." 
% self.uri + ) + + self.input_encoding = input_encoding + self.output_encoding = output_encoding + self.encoding_errors = encoding_errors + self.enable_loop = enable_loop + self.strict_undefined = strict_undefined + self.module_writer = module_writer + + if default_filters is None: + self.default_filters = ["str"] + else: + self.default_filters = default_filters + self.buffer_filters = buffer_filters + + self.imports = imports + self.future_imports = future_imports + self.preprocessor = preprocessor + + if lexer_cls is not None: + self.lexer_cls = lexer_cls + + # if plain text, compile code in memory only + if text is not None: + (code, module) = _compile_text(self, text, filename) + self._code = code + self._source = text + ModuleInfo(module, None, self, filename, code, text, uri) + elif filename is not None: + # if template filename and a module directory, load + # a filesystem-based module file, generating if needed + if module_filename is not None: + path = module_filename + elif module_directory is not None: + path = os.path.abspath( + os.path.join( + os.path.normpath(module_directory), u_norm + ".py" + ) + ) + else: + path = None + module = self._compile_from_file(path, filename) + else: + raise exceptions.RuntimeException( + "Template requires text or filename" + ) + + self.module = module + self.filename = filename + self.callable_ = self.module.render_body + self.format_exceptions = format_exceptions + self.error_handler = error_handler + self.include_error_handler = include_error_handler + self.lookup = lookup + + self.module_directory = module_directory + + self._setup_cache_args( + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ) + + @util.memoized_property + def reserved_names(self): + if self.enable_loop: + return codegen.RESERVED_NAMES + else: + return codegen.RESERVED_NAMES.difference(["loop"]) + + def _setup_cache_args( + self, + cache_impl, + cache_enabled, + cache_args, + cache_type, + cache_dir, + cache_url, + ): + self.cache_impl = cache_impl + self.cache_enabled = cache_enabled + self.cache_args = cache_args or {} + # transfer deprecated cache_* args + if cache_type: + self.cache_args["type"] = cache_type + if cache_dir: + self.cache_args["dir"] = cache_dir + if cache_url: + self.cache_args["url"] = cache_url + + def _compile_from_file(self, path, filename): + if path is not None: + util.verify_directory(os.path.dirname(path)) + filemtime = os.stat(filename)[stat.ST_MTIME] + if ( + not os.path.exists(path) + or os.stat(path)[stat.ST_MTIME] < filemtime + ): + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + if module._magic_number != codegen.MAGIC_NUMBER: + data = util.read_file(filename) + _compile_module_file( + self, data, filename, path, self.module_writer + ) + module = compat.load_module(self.module_id, path) + ModuleInfo(module, path, self, filename, None, None, None) + else: + # template filename and no module directory, compile code + # in memory + data = util.read_file(filename) + code, module = _compile_text(self, data, filename) + self._source = None + self._code = code + ModuleInfo(module, None, self, filename, code, None, None) + return module + + @property + def source(self): + """Return the template source code for this :class:`.Template`.""" + + return _get_module_info_from_callable(self.callable_).source + + @property + def code(self): + """Return the module source code for this :class:`.Template`.""" 
+ + return _get_module_info_from_callable(self.callable_).code + + @util.memoized_property + def cache(self): + return cache.Cache(self) + + @property + def cache_dir(self): + return self.cache_args["dir"] + + @property + def cache_url(self): + return self.cache_args["url"] + + @property + def cache_type(self): + return self.cache_args["type"] + + def render(self, *args, **data): + """Render the output of this template as a string. + + If the template specifies an output encoding, the string + will be encoded accordingly, else the output is raw (raw + output uses `StringIO` and can't handle multibyte + characters). A :class:`.Context` object is created corresponding + to the given data. Arguments that are explicitly declared + by this template's internal rendering method are also + pulled from the given ``*args``, ``**data`` members. + + """ + return runtime._render(self, self.callable_, args, data) + + def render_unicode(self, *args, **data): + """Render the output of this template as a unicode object.""" + + return runtime._render( + self, self.callable_, args, data, as_unicode=True + ) + + def render_context(self, context, *args, **kwargs): + """Render this :class:`.Template` with the given context. + + The data is written to the context's buffer. + + """ + if getattr(context, "_with_template", None) is None: + context._set_with_template(self) + runtime._render_context(self, self.callable_, context, *args, **kwargs) + + def has_def(self, name): + return hasattr(self.module, "render_%s" % name) + + def get_def(self, name): + """Return a def of this template as a :class:`.DefTemplate`.""" + + return DefTemplate(self, getattr(self.module, "render_%s" % name)) + + def list_defs(self): + """return a list of defs in the template. + + .. versionadded:: 1.0.4 + + """ + return [i[7:] for i in dir(self.module) if i[:7] == "render_"] + + def _get_def_callable(self, name): + return getattr(self.module, "render_%s" % name) + + @property + def last_modified(self): + return self.module._modified_time + + +class ModuleTemplate(Template): + + """A Template which is constructed given an existing Python module. 
+
+    e.g.::
+
+        t = Template("this is a template")
+        f = open("mymodule.py", "w")
+        f.write(t.code)
+        f.close()
+
+        import mymodule
+
+        t = ModuleTemplate(mymodule)
+        print(t.render())
+
+    """
+
+    def __init__(
+        self,
+        module,
+        module_filename=None,
+        template=None,
+        template_filename=None,
+        module_source=None,
+        template_source=None,
+        output_encoding=None,
+        encoding_errors="strict",
+        format_exceptions=False,
+        error_handler=None,
+        lookup=None,
+        cache_args=None,
+        cache_impl="beaker",
+        cache_enabled=True,
+        cache_type=None,
+        cache_dir=None,
+        cache_url=None,
+        include_error_handler=None,
+    ):
+        self.module_id = re.sub(r"\W", "_", module._template_uri)
+        self.uri = module._template_uri
+        self.input_encoding = module._source_encoding
+        self.output_encoding = output_encoding
+        self.encoding_errors = encoding_errors
+        self.enable_loop = module._enable_loop
+
+        self.module = module
+        self.filename = template_filename
+        ModuleInfo(
+            module,
+            module_filename,
+            self,
+            template_filename,
+            module_source,
+            template_source,
+            module._template_uri,
+        )
+
+        self.callable_ = self.module.render_body
+        self.format_exceptions = format_exceptions
+        self.error_handler = error_handler
+        self.include_error_handler = include_error_handler
+        self.lookup = lookup
+        self._setup_cache_args(
+            cache_impl,
+            cache_enabled,
+            cache_args,
+            cache_type,
+            cache_dir,
+            cache_url,
+        )
+
+
+class DefTemplate(Template):
+
+    """A :class:`.Template` which represents a callable def in a parent
+    template."""
+
+    def __init__(self, parent, callable_):
+        self.parent = parent
+        self.callable_ = callable_
+        self.output_encoding = parent.output_encoding
+        self.module = parent.module
+        self.encoding_errors = parent.encoding_errors
+        self.format_exceptions = parent.format_exceptions
+        self.error_handler = parent.error_handler
+        self.include_error_handler = parent.include_error_handler
+        self.enable_loop = parent.enable_loop
+        self.lookup = parent.lookup
+
+    def get_def(self, name):
+        return self.parent.get_def(name)
+
+
+class ModuleInfo:
+
+    """Stores information about a module currently loaded into
+    memory, and provides reverse lookups of template source and module
+    source code based on a module's identifier. 
+ + """ + + _modules = weakref.WeakValueDictionary() + + def __init__( + self, + module, + module_filename, + template, + template_filename, + module_source, + template_source, + template_uri, + ): + self.module = module + self.module_filename = module_filename + self.template_filename = template_filename + self.module_source = module_source + self.template_source = template_source + self.template_uri = template_uri + self._modules[module.__name__] = template._mmarker = self + if module_filename: + self._modules[module_filename] = self + + @classmethod + def get_module_source_metadata(cls, module_source, full_line_map=False): + source_map = re.search( + r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S + ).group(1) + source_map = json.loads(source_map) + source_map["line_map"] = { + int(k): int(v) for k, v in source_map["line_map"].items() + } + if full_line_map: + f_line_map = source_map["full_line_map"] = [] + line_map = source_map["line_map"] + + curr_templ_line = 1 + for mod_line in range(1, max(line_map)): + if mod_line in line_map: + curr_templ_line = line_map[mod_line] + f_line_map.append(curr_templ_line) + return source_map + + @property + def code(self): + if self.module_source is not None: + return self.module_source + else: + return util.read_python_file(self.module_filename) + + @property + def source(self): + if self.template_source is None: + data = util.read_file(self.template_filename) + if self.module._source_encoding: + return data.decode(self.module._source_encoding) + else: + return data + + elif self.module._source_encoding and not isinstance( + self.template_source, str + ): + return self.template_source.decode(self.module._source_encoding) + else: + return self.template_source + + +def _compile(template, text, filename, generate_magic_comment): + lexer = template.lexer_cls( + text, + filename, + input_encoding=template.input_encoding, + preprocessor=template.preprocessor, + ) + node = lexer.parse() + source = codegen.compile( + node, + template.uri, + filename, + default_filters=template.default_filters, + buffer_filters=template.buffer_filters, + imports=template.imports, + future_imports=template.future_imports, + source_encoding=lexer.encoding, + generate_magic_comment=generate_magic_comment, + strict_undefined=template.strict_undefined, + enable_loop=template.enable_loop, + reserved_names=template.reserved_names, + ) + return source, lexer + + +def _compile_text(template, text, filename): + identifier = template.module_id + source, lexer = _compile( + template, text, filename, generate_magic_comment=False + ) + + cid = identifier + module = types.ModuleType(cid) + code = compile(source, cid, "exec") + + # this exec() works for 2.4->3.3. + exec(code, module.__dict__, module.__dict__) + return (source, module) + + +def _compile_module_file(template, text, filename, outputpath, module_writer): + source, lexer = _compile( + template, text, filename, generate_magic_comment=True + ) + + if isinstance(source, str): + source = source.encode(lexer.encoding or "ascii") + + if module_writer: + module_writer(source, outputpath) + else: + # make tempfiles in the same location as the ultimate + # location. this ensures they're on the same filesystem, + # avoiding synchronization issues. 
+ (dest, name) = tempfile.mkstemp(dir=os.path.dirname(outputpath)) + + os.write(dest, source) + os.close(dest) + shutil.move(name, outputpath) + + +def _get_module_info_from_callable(callable_): + return _get_module_info(callable_.__globals__["__name__"]) + + +def _get_module_info(filename): + return ModuleInfo._modules[filename] diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__init__.py b/myenv/lib/python3.10/site-packages/mako/testing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..98dea2f99 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/_config.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/_config.cpython-310.pyc new file mode 100644 index 000000000..32229dcc3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/_config.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/assertions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/assertions.cpython-310.pyc new file mode 100644 index 000000000..a202b2f16 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/assertions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/config.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/config.cpython-310.pyc new file mode 100644 index 000000000..b7ca4dcb9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/config.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/exclusions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/exclusions.cpython-310.pyc new file mode 100644 index 000000000..eb059e8cf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/exclusions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/fixtures.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/fixtures.cpython-310.pyc new file mode 100644 index 000000000..fbd2d3c63 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/fixtures.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/helpers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 000000000..79a527a80 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/mako/testing/__pycache__/helpers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/mako/testing/_config.py b/myenv/lib/python3.10/site-packages/mako/testing/_config.py new file mode 100644 index 000000000..4ee3d0a6e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/_config.py @@ -0,0 +1,128 @@ +import configparser +import dataclasses +from dataclasses import dataclass +from pathlib import Path +from typing import Callable +from typing import ClassVar +from typing import Optional +from typing import Union + +from .helpers import make_path + + +class 
ConfigError(BaseException): + pass + + +class MissingConfig(ConfigError): + pass + + +class MissingConfigSection(ConfigError): + pass + + +class MissingConfigItem(ConfigError): + pass + + +class ConfigValueTypeError(ConfigError): + pass + + +class _GetterDispatch: + def __init__(self, initialdata, default_getter: Callable): + self.default_getter = default_getter + self.data = initialdata + + def get_fn_for_type(self, type_): + return self.data.get(type_, self.default_getter) + + def get_typed_value(self, type_, name): + get_fn = self.get_fn_for_type(type_) + return get_fn(name) + + +def _parse_cfg_file(filespec: Union[Path, str]): + cfg = configparser.ConfigParser() + try: + filepath = make_path(filespec, check_exists=True) + except FileNotFoundError as e: + raise MissingConfig(f"No config file found at {filespec}") from e + else: + with open(filepath, encoding="utf-8") as f: + cfg.read_file(f) + return cfg + + +def _build_getter(cfg_obj, cfg_section, method, converter=None): + def caller(option, **kwargs): + try: + rv = getattr(cfg_obj, method)(cfg_section, option, **kwargs) + except configparser.NoSectionError as nse: + raise MissingConfigSection( + f"No config section named {cfg_section}" + ) from nse + except configparser.NoOptionError as noe: + raise MissingConfigItem(f"No config item for {option}") from noe + except ValueError as ve: + # ConfigParser.getboolean, .getint, .getfloat raise ValueError + # on bad types + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from ve + else: + if converter: + try: + rv = converter(rv) + except Exception as e: + raise ConfigValueTypeError( + f"Wrong value type for {option}" + ) from e + return rv + + return caller + + +def _build_getter_dispatch(cfg_obj, cfg_section, converters=None): + converters = converters or {} + + default_getter = _build_getter(cfg_obj, cfg_section, "get") + + # support ConfigParser builtins + getters = { + int: _build_getter(cfg_obj, cfg_section, "getint"), + bool: _build_getter(cfg_obj, cfg_section, "getboolean"), + float: _build_getter(cfg_obj, cfg_section, "getfloat"), + str: default_getter, + } + + # use ConfigParser.get and convert value + getters.update( + { + type_: _build_getter( + cfg_obj, cfg_section, "get", converter=converter_fn + ) + for type_, converter_fn in converters.items() + } + ) + + return _GetterDispatch(getters, default_getter) + + +@dataclass +class ReadsCfg: + section_header: ClassVar[str] + converters: ClassVar[Optional[dict]] = None + + @classmethod + def from_cfg_file(cls, filespec: Union[Path, str]): + cfg = _parse_cfg_file(filespec) + dispatch = _build_getter_dispatch( + cfg, cls.section_header, converters=cls.converters + ) + kwargs = { + field.name: dispatch.get_typed_value(field.type, field.name) + for field in dataclasses.fields(cls) + } + return cls(**kwargs) diff --git a/myenv/lib/python3.10/site-packages/mako/testing/assertions.py b/myenv/lib/python3.10/site-packages/mako/testing/assertions.py new file mode 100644 index 000000000..14ea63523 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/assertions.py @@ -0,0 +1,167 @@ +import contextlib +import re +import sys + + +def eq_(a, b, msg=None): + """Assert a == b, with repr messaging on failure.""" + assert a == b, msg or "%r != %r" % (a, b) + + +def ne_(a, b, msg=None): + """Assert a != b, with repr messaging on failure.""" + assert a != b, msg or "%r == %r" % (a, b) + + +def in_(a, b, msg=None): + """Assert a in b, with repr messaging on failure.""" + assert a in b, msg or "%r not in %r" % (a, b) + + 
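+# A quick illustration of the helpers above (values are hypothetical):
+#
+#     eq_(2 + 2, 4)
+#     ne_("foo", "bar")
+#     in_("a", "abc")
+
+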
+def not_in(a, b, msg=None):
+    """Assert a not in b, with repr messaging on failure."""
+    assert a not in b, msg or "%r is in %r" % (a, b)
+
+
+def _assert_proper_exception_context(exception):
+    """assert that any exception we're catching does not have a __context__
+    without a __cause__, and that __suppress_context__ is never set.
+
+    Python 3 will report nested exceptions as "during the handling of
+    error X, error Y occurred". That's not what we want to do. We want
+    these exceptions in a cause chain.
+
+    """
+
+    if (
+        exception.__context__ is not exception.__cause__
+        and not exception.__suppress_context__
+    ):
+        assert False, (
+            "Exception %r was correctly raised but did not set its "
+            "context %r as its cause."
+            % (exception, exception.__context__)
+        )
+
+
+def _assert_proper_cause_cls(exception, cause_cls):
+    """assert that the exception we're catching has a __cause__ of the
+    expected class.
+
+    """
+    assert isinstance(exception.__cause__, cause_cls), (
+        "Exception %r was correctly raised but has cause %r, which does not "
+        "have the expected cause type %r."
+        % (exception, exception.__cause__, cause_cls)
+    )
+
+
+def assert_raises(except_cls, callable_, *args, **kw):
+    return _assert_raises(except_cls, callable_, args, kw)
+
+
+def assert_raises_with_proper_context(except_cls, callable_, *args, **kw):
+    return _assert_raises(except_cls, callable_, args, kw, check_context=True)
+
+
+def assert_raises_with_given_cause(
+    except_cls, cause_cls, callable_, *args, **kw
+):
+    return _assert_raises(except_cls, callable_, args, kw, cause_cls=cause_cls)
+
+
+def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
+    return _assert_raises(except_cls, callable_, args, kwargs, msg=msg)
+
+
+def assert_raises_message_with_proper_context(
+    except_cls, msg, callable_, *args, **kwargs
+):
+    return _assert_raises(
+        except_cls, callable_, args, kwargs, msg=msg, check_context=True
+    )
+
+
+def assert_raises_message_with_given_cause(
+    except_cls, msg, cause_cls, callable_, *args, **kwargs
+):
+    return _assert_raises(
+        except_cls, callable_, args, kwargs, msg=msg, cause_cls=cause_cls
+    )
+
+
+def _assert_raises(
+    except_cls,
+    callable_,
+    args,
+    kwargs,
+    msg=None,
+    check_context=False,
+    cause_cls=None,
+):
+
+    with _expect_raises(except_cls, msg, check_context, cause_cls) as ec:
+        callable_(*args, **kwargs)
+    return ec.error
+
+
+class _ErrorContainer:
+    error = None
+
+
+@contextlib.contextmanager
+def _expect_raises(except_cls, msg=None, check_context=False, cause_cls=None):
+    ec = _ErrorContainer()
+    if check_context:
+        are_we_already_in_a_traceback = sys.exc_info()[0]
+    try:
+        yield ec
+        success = False
+    except except_cls as err:
+        ec.error = err
+        success = True
+        if msg is not None:
+            # I'm often pdbing here, and "err" above isn't
+            # in scope, so assign the string explicitly
+            error_as_string = str(err)
+            assert re.search(msg, error_as_string, re.UNICODE), "%r !~ %s" % (
+                msg,
+                error_as_string,
+            )
+        if cause_cls is not None:
+            _assert_proper_cause_cls(err, cause_cls)
+        if check_context and not are_we_already_in_a_traceback:
+            _assert_proper_exception_context(err)
+        print(str(err).encode("utf-8"))
+
+    # it's generally a good idea to not carry traceback objects outside
+    # of the except: block, but in this case 
especially we seem to have + # hit some bug in either python 3.10.0b2 or greenlet or both which + # this seems to fix: + # https://github.com/python-greenlet/greenlet/issues/242 + del ec + + # assert outside the block so it works for AssertionError too ! + assert success, "Callable did not raise an exception" + + +def expect_raises(except_cls, check_context=False): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message(except_cls, msg, check_context=False): + return _expect_raises(except_cls, msg=msg, check_context=check_context) + + +def expect_raises_with_proper_context(except_cls, check_context=True): + return _expect_raises(except_cls, check_context=check_context) + + +def expect_raises_message_with_proper_context( + except_cls, msg, check_context=True +): + return _expect_raises(except_cls, msg=msg, check_context=check_context) diff --git a/myenv/lib/python3.10/site-packages/mako/testing/config.py b/myenv/lib/python3.10/site-packages/mako/testing/config.py new file mode 100644 index 000000000..b77d0c080 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/config.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass +from pathlib import Path + +from ._config import ReadsCfg +from .helpers import make_path + + +@dataclass +class Config(ReadsCfg): + module_base: Path + template_base: Path + + section_header = "mako_testing" + converters = {Path: make_path} + + +config = Config.from_cfg_file("./setup.cfg") diff --git a/myenv/lib/python3.10/site-packages/mako/testing/exclusions.py b/myenv/lib/python3.10/site-packages/mako/testing/exclusions.py new file mode 100644 index 000000000..37b2d14a3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/exclusions.py @@ -0,0 +1,80 @@ +import pytest + +from mako.ext.beaker_cache import has_beaker +from mako.util import update_wrapper + + +try: + import babel.messages.extract as babel +except ImportError: + babel = None + + +try: + import lingua +except ImportError: + lingua = None + + +try: + import dogpile.cache # noqa +except ImportError: + has_dogpile_cache = False +else: + has_dogpile_cache = True + + +requires_beaker = pytest.mark.skipif( + not has_beaker, reason="Beaker is required for these tests." 
+) + + +requires_babel = pytest.mark.skipif( + babel is None, reason="babel not installed: skipping babelplugin test" +) + + +requires_lingua = pytest.mark.skipif( + lingua is None, reason="lingua not installed: skipping linguaplugin test" +) + + +requires_dogpile_cache = pytest.mark.skipif( + not has_dogpile_cache, + reason="dogpile.cache is required to run these tests", +) + + +def _pygments_version(): + try: + import pygments + + version = pygments.__version__ + except: + version = "0" + return version + + +requires_pygments_14 = pytest.mark.skipif( + _pygments_version() < "1.4", reason="Requires pygments 1.4 or greater" +) + + +# def requires_pygments_14(fn): + +# return skip_if( +# lambda: version < "1.4", "Requires pygments 1.4 or greater" +# )(fn) + + +def requires_no_pygments_exceptions(fn): + def go(*arg, **kw): + from mako import exceptions + + exceptions._install_fallback() + try: + return fn(*arg, **kw) + finally: + exceptions._install_highlighting() + + return update_wrapper(go, fn) diff --git a/myenv/lib/python3.10/site-packages/mako/testing/fixtures.py b/myenv/lib/python3.10/site-packages/mako/testing/fixtures.py new file mode 100644 index 000000000..01e996171 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/fixtures.py @@ -0,0 +1,119 @@ +import os + +from mako.cache import CacheImpl +from mako.cache import register_plugin +from mako.template import Template +from .assertions import eq_ +from .config import config + + +class TemplateTest: + def _file_template(self, filename, **kw): + filepath = self._file_path(filename) + return Template( + uri=filename, + filename=filepath, + module_directory=config.module_base, + **kw, + ) + + def _file_path(self, filename): + name, ext = os.path.splitext(filename) + py3k_path = os.path.join(config.template_base, name + "_py3k" + ext) + if os.path.exists(py3k_path): + return py3k_path + + return os.path.join(config.template_base, filename) + + def _do_file_test( + self, + filename, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = self._file_template(filename, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_memory_test( + self, + source, + expected, + filters=None, + unicode_=True, + template_args=None, + **kw, + ): + t1 = Template(text=source, **kw) + self._do_test( + t1, + expected, + filters=filters, + unicode_=unicode_, + template_args=template_args, + ) + + def _do_test( + self, + template, + expected, + filters=None, + template_args=None, + unicode_=True, + ): + if template_args is None: + template_args = {} + if unicode_: + output = template.render_unicode(**template_args) + else: + output = template.render(**template_args) + + if filters: + output = filters(output) + eq_(output, expected) + + def indicates_unbound_local_error(self, rendered_output, unbound_var): + var = f"'{unbound_var}'" + error_msgs = ( + # < 3.11 + f"local variable {var} referenced before assignment", + # >= 3.11 + f"cannot access local variable {var} where it is not associated", + ) + return any((msg in rendered_output) for msg in error_msgs) + + +class PlainCacheImpl(CacheImpl): + """Simple memory cache impl so that tests which + use caching can run without beaker.""" + + def __init__(self, cache): + self.cache = cache + self.data = {} + + def get_or_create(self, key, creation_function, **kw): + if key in self.data: + return self.data[key] + else: + self.data[key] = data = creation_function(**kw) + return data + + def 
put(self, key, value, **kw): + self.data[key] = value + + def get(self, key, **kw): + return self.data[key] + + def invalidate(self, key, **kw): + del self.data[key] + + +register_plugin("plain", __name__, "PlainCacheImpl") diff --git a/myenv/lib/python3.10/site-packages/mako/testing/helpers.py b/myenv/lib/python3.10/site-packages/mako/testing/helpers.py new file mode 100644 index 000000000..77cca3672 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/testing/helpers.py @@ -0,0 +1,67 @@ +import contextlib +import pathlib +from pathlib import Path +import re +import time +from typing import Union +from unittest import mock + + +def flatten_result(result): + return re.sub(r"[\s\r\n]+", " ", result).strip() + + +def result_lines(result): + return [ + x.strip() + for x in re.split(r"\r?\n", re.sub(r" +", " ", result)) + if x.strip() != "" + ] + + +def make_path( + filespec: Union[Path, str], + make_absolute: bool = True, + check_exists: bool = False, +) -> Path: + path = Path(filespec) + if make_absolute: + path = path.resolve(strict=check_exists) + if check_exists and (not path.exists()): + raise FileNotFoundError(f"No file or directory at {filespec}") + return path + + +def _unlink_path(path, missing_ok=False): + # Replicate 3.8+ functionality in 3.7 + cm = contextlib.nullcontext() + if missing_ok: + cm = contextlib.suppress(FileNotFoundError) + + with cm: + path.unlink() + + +def replace_file_with_dir(pathspec): + path = pathlib.Path(pathspec) + _unlink_path(path, missing_ok=True) + path.mkdir(exist_ok=True) + return path + + +def file_with_template_code(filespec): + with open(filespec, "w") as f: + f.write( + """ +i am an artificial template just for you +""" + ) + return filespec + + +@contextlib.contextmanager +def rewind_compile_time(hours=1): + rewound = time.time() - (hours * 3_600) + with mock.patch("mako.codegen.time") as codegen_time: + codegen_time.time.return_value = rewound + yield diff --git a/myenv/lib/python3.10/site-packages/mako/util.py b/myenv/lib/python3.10/site-packages/mako/util.py new file mode 100644 index 000000000..5fb683b4f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/mako/util.py @@ -0,0 +1,388 @@ +# mako/util.py +# Copyright 2006-2022 the Mako authors and contributors +# +# This module is part of Mako and is released under +# the MIT License: http://www.opensource.org/licenses/mit-license.php +from ast import parse +import codecs +import collections +import operator +import os +import re +import timeit + +from .compat import importlib_metadata_get + + +def update_wrapper(decorated, fn): + decorated.__wrapped__ = fn + decorated.__name__ = fn.__name__ + return decorated + + +class PluginLoader: + def __init__(self, group): + self.group = group + self.impls = {} + + def load(self, name): + if name in self.impls: + return self.impls[name]() + + for impl in importlib_metadata_get(self.group): + if impl.name == name: + self.impls[name] = impl.load + return impl.load() + + from mako import exceptions + + raise exceptions.RuntimeException( + "Can't load plugin %s %s" % (self.group, name) + ) + + def register(self, name, modulepath, objname): + def load(): + mod = __import__(modulepath) + for token in modulepath.split(".")[1:]: + mod = getattr(mod, token) + return getattr(mod, objname) + + self.impls[name] = load + + +def verify_directory(dir_): + """create and/or verify a filesystem directory.""" + + tries = 0 + + while not os.path.exists(dir_): + try: + tries += 1 + os.makedirs(dir_, 0o755) + except: + if tries > 5: + raise + + +def to_list(x, 
default=None):
+    if x is None:
+        return default
+    if not isinstance(x, (list, tuple)):
+        return [x]
+    else:
+        return x
+
+
+class memoized_property:
+
+    """A read-only @property that is only evaluated once."""
+
+    def __init__(self, fget, doc=None):
+        self.fget = fget
+        self.__doc__ = doc or fget.__doc__
+        self.__name__ = fget.__name__
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+        obj.__dict__[self.__name__] = result = self.fget(obj)
+        return result
+
+
+class memoized_instancemethod:
+
+    """Decorate a method to memoize its return value.
+
+    Best applied to no-arg methods: memoization is not sensitive to
+    argument values, and will always return the same value even when
+    called with different arguments.
+
+    """
+
+    def __init__(self, fget, doc=None):
+        self.fget = fget
+        self.__doc__ = doc or fget.__doc__
+        self.__name__ = fget.__name__
+
+    def __get__(self, obj, cls):
+        if obj is None:
+            return self
+
+        def oneshot(*args, **kw):
+            result = self.fget(obj, *args, **kw)
+
+            def memo(*a, **kw):
+                return result
+
+            memo.__name__ = self.__name__
+            memo.__doc__ = self.__doc__
+            obj.__dict__[self.__name__] = memo
+            return result
+
+        oneshot.__name__ = self.__name__
+        oneshot.__doc__ = self.__doc__
+        return oneshot
+
+
+class SetLikeDict(dict):
+
+    """a dictionary that has some setlike methods on it"""
+
+    def union(self, other):
+        """produce a 'union' of this dict and another (at the key level).
+
+        values in the second dict take precedence over that of the first"""
+        x = SetLikeDict(**self)
+        x.update(other)
+        return x
+
+
+class FastEncodingBuffer:
+
+    """a very rudimentary buffer that is faster than StringIO,
+    and supports unicode data."""
+
+    def __init__(self, encoding=None, errors="strict"):
+        self.data = collections.deque()
+        self.encoding = encoding
+        self.delim = ""
+        self.errors = errors
+        self.write = self.data.append
+
+    def truncate(self):
+        self.data = collections.deque()
+        self.write = self.data.append
+
+    def getvalue(self):
+        if self.encoding:
+            return self.delim.join(self.data).encode(
+                self.encoding, self.errors
+            )
+        else:
+            return self.delim.join(self.data)
+
+
+class LRUCache(dict):
+
+    """A dictionary-like object that stores a limited number of items,
+    discarding lesser used items periodically.
+
+    This is a rewrite of LRUCache from Myghty that uses a periodic
+    timestamp-based paradigm so that synchronization is not really needed.
+    The size management is inexact.
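+
+    A rough usage sketch (capacity and keys are illustrative)::
+
+        cache = LRUCache(2)
+        cache["a"] = 1
+        cache["b"] = 2
+        assert cache["a"] == 1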
+ """ + + class _Item: + def __init__(self, key, value): + self.key = key + self.value = value + self.timestamp = timeit.default_timer() + + def __repr__(self): + return repr(self.value) + + def __init__(self, capacity, threshold=0.5): + self.capacity = capacity + self.threshold = threshold + + def __getitem__(self, key): + item = dict.__getitem__(self, key) + item.timestamp = timeit.default_timer() + return item.value + + def values(self): + return [i.value for i in dict.values(self)] + + def setdefault(self, key, value): + if key in self: + return self[key] + self[key] = value + return value + + def __setitem__(self, key, value): + item = dict.get(self, key) + if item is None: + item = self._Item(key, value) + dict.__setitem__(self, key, item) + else: + item.value = value + self._manage_size() + + def _manage_size(self): + while len(self) > self.capacity + self.capacity * self.threshold: + bytime = sorted( + dict.values(self), + key=operator.attrgetter("timestamp"), + reverse=True, + ) + for item in bytime[self.capacity :]: + try: + del self[item.key] + except KeyError: + # if we couldn't find a key, most likely some other thread + # broke in on us. loop around and try again + break + + +# Regexp to match python magic encoding line +_PYTHON_MAGIC_COMMENT_re = re.compile( + r"[ \t\f]* \# .* coding[=:][ \t]*([-\w.]+)", re.VERBOSE +) + + +def parse_encoding(fp): + """Deduce the encoding of a Python source file (binary mode) from magic + comment. + + It does this in the same way as the `Python interpreter`__ + + .. __: http://docs.python.org/ref/encodings.html + + The ``fp`` argument should be a seekable file object in binary mode. + """ + pos = fp.tell() + fp.seek(0) + try: + line1 = fp.readline() + has_bom = line1.startswith(codecs.BOM_UTF8) + if has_bom: + line1 = line1[len(codecs.BOM_UTF8) :] + + m = _PYTHON_MAGIC_COMMENT_re.match(line1.decode("ascii", "ignore")) + if not m: + try: + parse(line1.decode("ascii", "ignore")) + except (ImportError, SyntaxError): + # Either it's a real syntax error, in which case the source + # is not valid python source, or line2 is a continuation of + # line1, in which case we don't want to scan line2 for a magic + # comment. + pass + else: + line2 = fp.readline() + m = _PYTHON_MAGIC_COMMENT_re.match( + line2.decode("ascii", "ignore") + ) + + if has_bom: + if m: + raise SyntaxError( + "python refuses to compile code with both a UTF8" + " byte-order-mark and a magic encoding comment" + ) + return "utf_8" + elif m: + return m.group(1) + else: + return None + finally: + fp.seek(pos) + + +def sorted_dict_repr(d): + """repr() a dictionary with the keys in order. + + Used by the lexer unit test to compare parse trees based on strings. 
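+
+    A quick illustration (hypothetical call, not from the upstream docs)::
+
+        sorted_dict_repr({"b": 2, "a": 1})   # returns "{'a': 1, 'b': 2}"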
+
+    """
+    keys = list(d.keys())
+    keys.sort()
+    return "{" + ", ".join("%r: %r" % (k, d[k]) for k in keys) + "}"
+
+
+def restore__ast(_ast):
+    """Attempt to restore the required classes to the _ast module if it
+    appears to be missing them
+    """
+    if hasattr(_ast, "AST"):
+        return
+    _ast.PyCF_ONLY_AST = 2 << 9
+    m = compile(
+        """\
+def foo(): pass
+class Bar: pass
+if False: pass
+baz = 'mako'
+1 + 2 - 3 * 4 / 5
+6 // 7 % 8 << 9 >> 10
+11 & 12 ^ 13 | 14
+15 and 16 or 17
+-baz + (not +18) - ~17
+baz and 'foo' or 'bar'
+(mako is baz == baz) is not baz != mako
+mako > baz < mako >= baz <= mako
+mako in baz not in mako""",
+        "<unknown>",
+        "exec",
+        _ast.PyCF_ONLY_AST,
+    )
+    _ast.Module = type(m)
+
+    for cls in _ast.Module.__mro__:
+        if cls.__name__ == "mod":
+            _ast.mod = cls
+        elif cls.__name__ == "AST":
+            _ast.AST = cls
+
+    _ast.FunctionDef = type(m.body[0])
+    _ast.ClassDef = type(m.body[1])
+    _ast.If = type(m.body[2])
+
+    _ast.Name = type(m.body[3].targets[0])
+    _ast.Store = type(m.body[3].targets[0].ctx)
+    _ast.Str = type(m.body[3].value)
+
+    _ast.Sub = type(m.body[4].value.op)
+    _ast.Add = type(m.body[4].value.left.op)
+    _ast.Div = type(m.body[4].value.right.op)
+    _ast.Mult = type(m.body[4].value.right.left.op)
+
+    _ast.RShift = type(m.body[5].value.op)
+    _ast.LShift = type(m.body[5].value.left.op)
+    _ast.Mod = type(m.body[5].value.left.left.op)
+    _ast.FloorDiv = type(m.body[5].value.left.left.left.op)
+
+    _ast.BitOr = type(m.body[6].value.op)
+    _ast.BitXor = type(m.body[6].value.left.op)
+    _ast.BitAnd = type(m.body[6].value.left.left.op)
+
+    _ast.Or = type(m.body[7].value.op)
+    _ast.And = type(m.body[7].value.values[0].op)
+
+    _ast.Invert = type(m.body[8].value.right.op)
+    _ast.Not = type(m.body[8].value.left.right.op)
+    _ast.UAdd = type(m.body[8].value.left.right.operand.op)
+    _ast.USub = type(m.body[8].value.left.left.op)
+
+    _ast.Or = type(m.body[9].value.op)
+    _ast.And = type(m.body[9].value.values[0].op)
+
+    _ast.IsNot = type(m.body[10].value.ops[0])
+    _ast.NotEq = type(m.body[10].value.ops[1])
+    _ast.Is = type(m.body[10].value.left.ops[0])
+    _ast.Eq = type(m.body[10].value.left.ops[1])
+
+    _ast.Gt = type(m.body[11].value.ops[0])
+    _ast.Lt = type(m.body[11].value.ops[1])
+    _ast.GtE = type(m.body[11].value.ops[2])
+    _ast.LtE = type(m.body[11].value.ops[3])
+
+    _ast.In = type(m.body[12].value.ops[0])
+    _ast.NotIn = type(m.body[12].value.ops[1])
+
+
+def read_file(path, mode="rb"):
+    with open(path, mode) as fp:
+        return fp.read()
+
+
+def read_python_file(path):
+    fp = open(path, "rb")
+    try:
+        encoding = parse_encoding(fp)
+        data = fp.read()
+        if encoding:
+            data = data.decode(encoding)
+        return data
+    finally:
+        fp.close()
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/__init__.py b/myenv/lib/python3.10/site-packages/markupsafe/__init__.py
new file mode 100644
index 000000000..7166b1920
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/markupsafe/__init__.py
@@ -0,0 +1,295 @@
+import functools
+import re
+import string
+import typing as t
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+    class HasHTML(te.Protocol):
+        def __html__(self) -> str:
+            pass
+
+
+__version__ = "2.1.2"
+
+_strip_comments_re = re.compile(r"<!--.*?-->", re.DOTALL)
+_strip_tags_re = re.compile(r"<.*?>", re.DOTALL)
+
+
+def _simple_escaping_wrapper(name: str) -> t.Callable[..., "Markup"]:
+    orig = getattr(str, name)
+
+    @functools.wraps(orig)
+    def wrapped(self: "Markup", *args: t.Any, **kwargs: t.Any) -> "Markup":
+        args = _escape_argspec(list(args), enumerate(args), self.escape)  # type: ignore
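+        # Both positional and keyword arguments are escaped in place before
+        # being handed to the underlying ``str`` method, so a call such as
+        # (hypothetical example) ``Markup("abc").replace("b", "<x>")``
+        # returns Markup('a&lt;x&gt;c') instead of injecting a raw tag.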
+        _escape_argspec(kwargs, kwargs.items(), self.escape)
+        return self.__class__(orig(self, *args, **kwargs))
+
+    return wrapped
+
+
+class Markup(str):
+    """A string that is ready to be safely inserted into an HTML or XML
+    document, either because it was escaped or because it was marked
+    safe.
+
+    Passing an object to the constructor converts it to text and wraps
+    it to mark it safe without escaping. To escape the text, use the
+    :meth:`escape` class method instead.
+
+    >>> Markup("Hello, <em>World</em>!")
+    Markup('Hello, <em>World</em>!')
+    >>> Markup(42)
+    Markup('42')
+    >>> Markup.escape("Hello, <em>World</em>!")
+    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
+
+    This implements the ``__html__()`` interface that some frameworks
+    use. Passing an object that implements ``__html__()`` will wrap the
+    output of that method, marking it safe.
+
+    >>> class Foo:
+    ...     def __html__(self):
+    ...         return '<a href="/foo">foo</a>'
+    ...
+    >>> Markup(Foo())
+    Markup('<a href="/foo">foo</a>')
+
+    This is a subclass of :class:`str`. It has the same methods, but
+    escapes their arguments and returns a ``Markup`` instance.
+
+    >>> Markup("<em>%s</em>") % ("foo & bar",)
+    Markup('<em>foo &amp; bar</em>')
+    >>> Markup("<em>Hello</em> ") + "<foo>"
+    Markup('<em>Hello</em> &lt;foo&gt;')
+    """
+
+    __slots__ = ()
+
+    def __new__(
+        cls, base: t.Any = "", encoding: t.Optional[str] = None, errors: str = "strict"
+    ) -> "Markup":
+        if hasattr(base, "__html__"):
+            base = base.__html__()
+
+        if encoding is None:
+            return super().__new__(cls, base)
+
+        return super().__new__(cls, base, encoding, errors)
+
+    def __html__(self) -> "Markup":
+        return self
+
+    def __add__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.__class__(super().__add__(self.escape(other)))
+
+        return NotImplemented
+
+    def __radd__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.escape(other).__add__(self)
+
+        return NotImplemented
+
+    def __mul__(self, num: "te.SupportsIndex") -> "Markup":
+        if isinstance(num, int):
+            return self.__class__(super().__mul__(num))
+
+        return NotImplemented
+
+    __rmul__ = __mul__
+
+    def __mod__(self, arg: t.Any) -> "Markup":
+        if isinstance(arg, tuple):
+            # a tuple of arguments, each wrapped
+            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
+        elif hasattr(type(arg), "__getitem__") and not isinstance(arg, str):
+            # a mapping of arguments, wrapped
+            arg = _MarkupEscapeHelper(arg, self.escape)
+        else:
+            # a single argument, wrapped with the helper and a tuple
+            arg = (_MarkupEscapeHelper(arg, self.escape),)
+
+        return self.__class__(super().__mod__(arg))
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({super().__repr__()})"
+
+    def join(self, seq: t.Iterable[t.Union[str, "HasHTML"]]) -> "Markup":
+        return self.__class__(super().join(map(self.escape, seq)))
+
+    join.__doc__ = str.join.__doc__
+
+    def split(  # type: ignore
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["Markup"]:
+        return [self.__class__(v) for v in super().split(sep, maxsplit)]
+
+    split.__doc__ = str.split.__doc__
+
+    def rsplit(  # type: ignore
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["Markup"]:
+        return [self.__class__(v) for v in super().rsplit(sep, maxsplit)]
+
+    rsplit.__doc__ = str.rsplit.__doc__
+
+    def splitlines(self, keepends: bool = False) -> t.List["Markup"]:  # type: ignore
+        return [self.__class__(v) for v in super().splitlines(keepends)]
+
+    splitlines.__doc__ = str.splitlines.__doc__
+
+    def unescape(self) -> str:
+        """Convert escaped markup back into a text string. This replaces
+        HTML entities with the characters they represent.
+
+        >>> Markup("Main &raquo; <em>About</em>").unescape()
+        'Main » <em>About</em>'
+        """
+        from html import unescape
+
+        return unescape(str(self))
+
+    def striptags(self) -> str:
+        """:meth:`unescape` the markup, remove tags, and normalize
+        whitespace to single spaces.
+
+        >>> Markup("Main &raquo;\t<em>About</em>").striptags()
+        'Main » About'
+        """
+        # Use two regexes to avoid ambiguous matches.
+        value = _strip_comments_re.sub("", self)
+        value = _strip_tags_re.sub("", value)
+        value = " ".join(value.split())
+        return Markup(value).unescape()
+
+    @classmethod
+    def escape(cls, s: t.Any) -> "Markup":
+        """Escape a string. Calls :func:`escape` and ensures that for
+        subclasses the correct type is returned.
+        """
+        rv = escape(s)
+
+        if rv.__class__ is not cls:
+            return cls(rv)
+
+        return rv
+
+    for method in (
+        "__getitem__",
+        "capitalize",
+        "title",
+        "lower",
+        "upper",
+        "replace",
+        "ljust",
+        "rjust",
+        "lstrip",
+        "rstrip",
+        "center",
+        "strip",
+        "translate",
+        "expandtabs",
+        "swapcase",
+        "zfill",
+    ):
+        locals()[method] = _simple_escaping_wrapper(method)
+
+    del method
+
+    def partition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
+        l, s, r = super().partition(self.escape(sep))
+        cls = self.__class__
+        return cls(l), cls(s), cls(r)
+
+    def rpartition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
+        l, s, r = super().rpartition(self.escape(sep))
+        cls = self.__class__
+        return cls(l), cls(s), cls(r)
+
+    def format(self, *args: t.Any, **kwargs: t.Any) -> "Markup":
+        formatter = EscapeFormatter(self.escape)
+        return self.__class__(formatter.vformat(self, args, kwargs))
+
+    def __html_format__(self, format_spec: str) -> "Markup":
+        if format_spec:
+            raise ValueError("Unsupported format specification for Markup.")
+
+        return self
+
+
+class EscapeFormatter(string.Formatter):
+    __slots__ = ("escape",)
+
+    def __init__(self, escape: t.Callable[[t.Any], Markup]) -> None:
+        self.escape = escape
+        super().__init__()
+
+    def format_field(self, value: t.Any, format_spec: str) -> str:
+        if hasattr(value, "__html_format__"):
+            rv = value.__html_format__(format_spec)
+        elif hasattr(value, "__html__"):
+            if format_spec:
+                raise ValueError(
+                    f"Format specifier {format_spec} given, but {type(value)} does not"
+                    " define __html_format__. A class that defines __html__ must define"
+                    " __html_format__ to work with format specifiers."
+                )
+            rv = value.__html__()
+        else:
+            # We need to make sure the format spec is str here as
+            # otherwise the wrong callback methods are invoked.
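+            # For instance (hypothetical call): Markup("{:>6}").format("<a>")
+            # applies the width to the raw text first and escapes on the way
+            # out, yielding Markup('   &lt;a&gt;').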
+            rv = string.Formatter.format_field(self, value, str(format_spec))
+        return str(self.escape(rv))
+
+
+_ListOrDict = t.TypeVar("_ListOrDict", list, dict)
+
+
+def _escape_argspec(
+    obj: _ListOrDict, iterable: t.Iterable[t.Any], escape: t.Callable[[t.Any], Markup]
+) -> _ListOrDict:
+    """Helper for various string-wrapped functions."""
+    for key, value in iterable:
+        if isinstance(value, str) or hasattr(value, "__html__"):
+            obj[key] = escape(value)
+
+    return obj
+
+
+class _MarkupEscapeHelper:
+    """Helper for :meth:`Markup.__mod__`."""
+
+    __slots__ = ("obj", "escape")
+
+    def __init__(self, obj: t.Any, escape: t.Callable[[t.Any], Markup]) -> None:
+        self.obj = obj
+        self.escape = escape
+
+    def __getitem__(self, item: t.Any) -> "_MarkupEscapeHelper":
+        return _MarkupEscapeHelper(self.obj[item], self.escape)
+
+    def __str__(self) -> str:
+        return str(self.escape(self.obj))
+
+    def __repr__(self) -> str:
+        return str(self.escape(repr(self.obj)))
+
+    def __int__(self) -> int:
+        return int(self.obj)
+
+    def __float__(self) -> float:
+        return float(self.obj)
+
+
+# circular import
+try:
+    from ._speedups import escape as escape
+    from ._speedups import escape_silent as escape_silent
+    from ._speedups import soft_str as soft_str
+except ImportError:
+    from ._native import escape as escape
+    from ._native import escape_silent as escape_silent  # noqa: F401
+    from ._native import soft_str as soft_str  # noqa: F401
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..c2ee6e1fe
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc b/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc
new file mode 100644
index 000000000..255ecf68b
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/_native.py b/myenv/lib/python3.10/site-packages/markupsafe/_native.py
new file mode 100644
index 000000000..8117b2716
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/markupsafe/_native.py
@@ -0,0 +1,63 @@
+import typing as t
+
+from . import Markup
+
+
+def escape(s: t.Any) -> Markup:
+    """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+    the string with HTML-safe sequences. Use this if you need to display
+    text that might contain such characters in HTML.
+
+    If the object has an ``__html__`` method, it is called and the
+    return value is assumed to already be safe for HTML.
+
+    :param s: An object to be converted to a string and escaped.
+    :return: A :class:`Markup` string with the escaped text.
+    """
+    if hasattr(s, "__html__"):
+        return Markup(s.__html__())
+
+    return Markup(
+        str(s)
+        .replace("&", "&amp;")
+        .replace(">", "&gt;")
+        .replace("<", "&lt;")
+        .replace("'", "&#39;")
+        .replace('"', "&#34;")
+    )
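+
+
+# A rough illustration (not part of the markupsafe distribution): the C
+# extension in _speedups.c further below implements this same escaping with a
+# two-pass strategy -- GET_DELTA first counts how many extra characters the
+# entities will need so the output buffer can be allocated exactly once, and
+# DO_ESCAPE then copies and substitutes. Sketched in Python:
+def _two_pass_escape_sketch(s: str) -> str:
+    entities = {"&": "&amp;", "'": "&#39;", '"': "&#34;", "<": "&lt;", ">": "&gt;"}
+    # Pass 1: how much longer will the escaped string be?
+    delta = sum(len(entities[ch]) - 1 for ch in s if ch in entities)
+    if not delta:
+        # Nothing to escape; the C code returns the input object unchanged.
+        return s
+    # Pass 2: substitute (the C code writes into a buffer preallocated to
+    # len(s) + delta instead of building a list).
+    return "".join(entities.get(ch, ch) for ch in s)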
+
+
+def escape_silent(s: t.Optional[t.Any]) -> Markup:
+    """Like :func:`escape` but treats ``None`` as the empty string.
+    Useful with optional values, as otherwise you get the string
+    ``'None'`` when the value is ``None``.
+
+    >>> escape(None)
+    Markup('None')
+    >>> escape_silent(None)
+    Markup('')
+    """
+    if s is None:
+        return Markup()
+
+    return escape(s)
+
+
+def soft_str(s: t.Any) -> str:
+    """Convert an object to a string if it isn't already. This preserves
+    a :class:`Markup` string rather than converting it back to a basic
+    string, so it will still be marked as safe and won't be escaped
+    again.
+
+    >>> value = escape("<User 1>")
+    >>> value
+    Markup('&lt;User 1&gt;')
+    >>> escape(str(value))
+    Markup('&amp;lt;User 1&amp;gt;')
+    >>> escape(soft_str(value))
+    Markup('&lt;User 1&gt;')
+    """
+    if not isinstance(s, str):
+        return str(s)
+
+    return s
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/_speedups.c b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.c
new file mode 100644
index 000000000..3c463fb82
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.c
@@ -0,0 +1,320 @@
+#include <Python.h>
+
+static PyObject* markup;
+
+static int
+init_constants(void)
+{
+    PyObject *module;
+
+    /* import markup type so that we can mark the return value */
+    module = PyImport_ImportModule("markupsafe");
+    if (!module)
+        return 0;
+    markup = PyObject_GetAttrString(module, "Markup");
+    Py_DECREF(module);
+
+    return 1;
+}
+
+#define GET_DELTA(inp, inp_end, delta) \
+    while (inp < inp_end) { \
+        switch (*inp++) { \
+        case '"': \
+        case '\'': \
+        case '&': \
+            delta += 4; \
+            break; \
+        case '<': \
+        case '>': \
+            delta += 3; \
+            break; \
+        } \
+    }
+
+#define DO_ESCAPE(inp, inp_end, outp) \
+    { \
+        Py_ssize_t ncopy = 0; \
+        while (inp < inp_end) { \
+            switch (*inp) { \
+            case '"': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = '#'; \
+                *outp++ = '3'; \
+                *outp++ = '4'; \
+                *outp++ = ';'; \
+                break; \
+            case '\'': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = '#'; \
+                *outp++ = '3'; \
+                *outp++ = '9'; \
+                *outp++ = ';'; \
+                break; \
+            case '&': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = 'a'; \
+                *outp++ = 'm'; \
+                *outp++ = 'p'; \
+                *outp++ = ';'; \
+                break; \
+            case '<': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = 'l'; \
+                *outp++ = 't'; \
+                *outp++ = ';'; \
+                break; \
+            case '>': \
+                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+                outp += ncopy; ncopy = 0; \
+                *outp++ = '&'; \
+                *outp++ = 'g'; \
+                *outp++ = 't'; \
+                *outp++ = ';'; \
+                break; \
+            default: \
+                ncopy++; \
+            } \
+            inp++; \
+        } \
+        memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
+    }
+
+static PyObject*
+escape_unicode_kind1(PyUnicodeObject *in)
+{
+    Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in);
+    Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in);
+    Py_UCS1 *outp;
+    PyObject *out;
+    Py_ssize_t delta = 0;
+
+    GET_DELTA(inp, inp_end, delta);
+    if (!delta) {
+        Py_INCREF(in);
+        return (PyObject*)in;
+    }
+
+    out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta,
+                        PyUnicode_IS_ASCII(in) ?
127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyUnicodeObject *in) +{ + if (PyUnicode_READY(in)) + return NULL; + + switch (PyUnicode_KIND(in)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1(in); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2(in); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4(in); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyObject* +escape(PyObject *self, PyObject *text) +{ + static PyObject *id_html; + PyObject *s = NULL, *rv = NULL, *html; + + if (id_html == NULL) { + id_html = PyUnicode_InternFromString("__html__"); + if (id_html == NULL) { + return NULL; + } + } + + /* we don't have to escape integers, bools or floats */ + if (PyLong_CheckExact(text) || + PyFloat_CheckExact(text) || PyBool_Check(text) || + text == Py_None) + return PyObject_CallFunctionObjArgs(markup, text, NULL); + + /* if the object has an __html__ method that performs the escaping */ + html = PyObject_GetAttr(text ,id_html); + if (html) { + s = PyObject_CallObject(html, NULL); + Py_DECREF(html); + if (s == NULL) { + return NULL; + } + /* Convert to Markup object */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; + } + + /* otherwise make the object unicode if it isn't, then escape */ + PyErr_Clear(); + if (!PyUnicode_Check(text)) { + PyObject *unicode = PyObject_Str(text); + if (!unicode) + return NULL; + s = escape_unicode((PyUnicodeObject*)unicode); + Py_DECREF(unicode); + } + else + s = escape_unicode((PyUnicodeObject*)text); + + /* convert the unicode string into a markup object. */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; +} + + +static PyObject* +escape_silent(PyObject *self, PyObject *text) +{ + if (text != Py_None) + return escape(self, text); + return PyObject_CallFunctionObjArgs(markup, NULL); +} + + +static PyObject* +soft_str(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return PyObject_Str(s); + Py_INCREF(s); + return s; +} + + +static PyMethodDef module_methods[] = { + { + "escape", + (PyCFunction)escape, + METH_O, + "Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in" + " the string with HTML-safe sequences. 
Use this if you need to display"
+        " text that might contain such characters in HTML.\n\n"
+        "If the object has an ``__html__`` method, it is called and the"
+        " return value is assumed to already be safe for HTML.\n\n"
+        ":param s: An object to be converted to a string and escaped.\n"
+        ":return: A :class:`Markup` string with the escaped text.\n"
+    },
+    {
+        "escape_silent",
+        (PyCFunction)escape_silent,
+        METH_O,
+        "Like :func:`escape` but treats ``None`` as the empty string."
+        " Useful with optional values, as otherwise you get the string"
+        " ``'None'`` when the value is ``None``.\n\n"
+        ">>> escape(None)\n"
+        "Markup('None')\n"
+        ">>> escape_silent(None)\n"
+        "Markup('')\n"
+    },
+    {
+        "soft_str",
+        (PyCFunction)soft_str,
+        METH_O,
+        "Convert an object to a string if it isn't already. This preserves"
+        " a :class:`Markup` string rather than converting it back to a basic"
+        " string, so it will still be marked as safe and won't be escaped"
+        " again.\n\n"
+        ">>> value = escape(\"<User 1>\")\n"
+        ">>> value\n"
+        "Markup('&lt;User 1&gt;')\n"
+        ">>> escape(str(value))\n"
+        "Markup('&amp;lt;User 1&amp;gt;')\n"
+        ">>> escape(soft_str(value))\n"
+        "Markup('&lt;User 1&gt;')\n"
+    },
+    {NULL, NULL, 0, NULL} /* Sentinel */
+};
+
+static struct PyModuleDef module_definition = {
+    PyModuleDef_HEAD_INIT,
+    "markupsafe._speedups",
+    NULL,
+    -1,
+    module_methods,
+    NULL,
+    NULL,
+    NULL,
+    NULL
+};
+
+PyMODINIT_FUNC
+PyInit__speedups(void)
+{
+    if (!init_constants())
+        return NULL;
+
+    return PyModule_Create(&module_definition);
+}
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so
new file mode 100755
index 000000000..b9241ce39
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so differ
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/_speedups.pyi b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.pyi
new file mode 100644
index 000000000..f673240f6
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/markupsafe/_speedups.pyi
@@ -0,0 +1,9 @@
+from typing import Any
+from typing import Optional
+
+from . import Markup
+
+def escape(s: Any) -> Markup: ...
+def escape_silent(s: Optional[Any]) -> Markup: ...
+def soft_str(s: Any) -> str: ...
+def soft_unicode(s: Any) -> str: ...
diff --git a/myenv/lib/python3.10/site-packages/markupsafe/py.typed b/myenv/lib/python3.10/site-packages/markupsafe/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/INSTALLER b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/INSTALLER
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/LICENSE b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/LICENSE
new file mode 100644
index 000000000..b8350378e
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/LICENSE
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2019-2022, IPython Development Team.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1.
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/METADATA b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/METADATA new file mode 100644 index 000000000..3411afd2f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/METADATA @@ -0,0 +1,82 @@ +Metadata-Version: 2.1 +Name: matplotlib-inline +Version: 0.1.6 +Summary: Inline Matplotlib backend for Jupyter +Home-page: https://github.com/ipython/matplotlib-inline +Author: IPython Development Team +Author-email: ipython-dev@scipy.org +License: BSD 3-Clause +Keywords: python,ipython,matplotlib,jupyter +Requires-Python: >=3.5 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: traitlets + +# Matplotlib Inline Back-end for IPython and Jupyter + +This package provides support for matplotlib to display figures directly inline in the Jupyter notebook and related clients, as shown below. + +## Installation + +With conda: + +```bash +conda install -c conda-forge matplotlib-inline +``` + +With pip: + +```bash +pip install matplotlib-inline +``` + +## Usage + +Note that in current versions of JupyterLab and Jupyter Notebook, the explicit use of the `%matplotlib inline` directive is not needed anymore, though other third-party clients may still require it. + +This will produce a figure immediately below: + +```python +%matplotlib inline + +import matplotlib.pyplot as plt +import numpy as np + +x = np.linspace(0, 3*np.pi, 500) +plt.plot(x, np.sin(x**2)) +plt.title('A simple chirp'); +``` + +## License + +Licensed under the terms of the BSD 3-Clause License, by the IPython Development Team (see `LICENSE` file). + +BSD 3-Clause License + +Copyright (c) 2019-2022, IPython Development Team. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/RECORD b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/RECORD new file mode 100644 index 000000000..2584ee1d9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/RECORD @@ -0,0 +1,13 @@ +matplotlib_inline-0.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +matplotlib_inline-0.1.6.dist-info/LICENSE,sha256=hSGwNsZEjg4KpyE9RxO2_e4PTGTJ8yBFD3c0a_XA6OQ,1538 +matplotlib_inline-0.1.6.dist-info/METADATA,sha256=iP6YScmis6YNZibrwn3fvVNtKp0k6ZHpMCbpfEvdGKk,2841 +matplotlib_inline-0.1.6.dist-info/RECORD,, +matplotlib_inline-0.1.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +matplotlib_inline-0.1.6.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +matplotlib_inline-0.1.6.dist-info/top_level.txt,sha256=0ViJqRtJZLIS7IfjHrUgYI6RIvWY0cfk1OjAlp40Zsw,18 +matplotlib_inline/__init__.py,sha256=ibK75tAMh_hr1mTuKaz62oBiVg1azrOT_nclTO5R73k,75 +matplotlib_inline/__pycache__/__init__.cpython-310.pyc,, +matplotlib_inline/__pycache__/backend_inline.cpython-310.pyc,, +matplotlib_inline/__pycache__/config.cpython-310.pyc,, +matplotlib_inline/backend_inline.py,sha256=a4w8cmBHOSUwMJHvLJUOs6pSUlOHEsx0N8HGEAfxsxU,11311 +matplotlib_inline/config.py,sha256=Z87OCv-LStYhOwQ2LmHDNxwkAn9BLFKTbHI7XSAT1Jc,3911 diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/REQUESTED b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/REQUESTED new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/WHEEL b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/WHEEL new file mode 100644 index 000000000..becc9a66e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/top_level.txt b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/top_level.txt new file mode 100644 index 000000000..4b061e800 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline-0.1.6.dist-info/top_level.txt 
@@ -0,0 +1 @@ +matplotlib_inline diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/__init__.py b/myenv/lib/python3.10/site-packages/matplotlib_inline/__init__.py new file mode 100644 index 000000000..f7ef1dd1a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline/__init__.py @@ -0,0 +1,2 @@ +from . import backend_inline, config # noqa +__version__ = "0.1.6" # noqa diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..3ee60a06d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/backend_inline.cpython-310.pyc b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/backend_inline.cpython-310.pyc new file mode 100644 index 000000000..8684e502a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/backend_inline.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/config.cpython-310.pyc b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/config.cpython-310.pyc new file mode 100644 index 000000000..98b379b9c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/matplotlib_inline/__pycache__/config.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/backend_inline.py b/myenv/lib/python3.10/site-packages/matplotlib_inline/backend_inline.py new file mode 100644 index 000000000..6bcc1d4a0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline/backend_inline.py @@ -0,0 +1,311 @@ +"""A matplotlib backend for publishing figures via display_data""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the BSD 3-Clause License. + +import matplotlib +from matplotlib import colors +from matplotlib.backends import backend_agg +from matplotlib.backends.backend_agg import FigureCanvasAgg +from matplotlib._pylab_helpers import Gcf +from matplotlib.figure import Figure + +from IPython.core.interactiveshell import InteractiveShell +from IPython.core.getipython import get_ipython +from IPython.core.pylabtools import select_figure_formats +from IPython.display import display + +from .config import InlineBackend + + +def new_figure_manager(num, *args, FigureClass=Figure, **kwargs): + """ + Return a new figure manager for a new figure instance. + + This function is part of the API expected by Matplotlib backends. + """ + return new_figure_manager_given_figure(num, FigureClass(*args, **kwargs)) + + +def new_figure_manager_given_figure(num, figure): + """ + Return a new figure manager for a given figure instance. + + This function is part of the API expected by Matplotlib backends. + """ + manager = backend_agg.new_figure_manager_given_figure(num, figure) + + # Hack: matplotlib FigureManager objects in interacive backends (at least + # in some of them) monkeypatch the figure object and add a .show() method + # to it. This applies the same monkeypatch in order to support user code + # that might expect `.show()` to be part of the official API of figure + # objects. 
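+    # (Hypothetical illustration: with this patch in place, user code such
+    # as ``fig = plt.figure(); fig.show()`` queues the figure for inline
+    # display instead of needing a GUI window, whenever the Figure class
+    # does not already provide a ``show`` method.)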
For further reference: + # https://github.com/ipython/ipython/issues/1612 + # https://github.com/matplotlib/matplotlib/issues/835 + + if not hasattr(figure, 'show'): + # Queue up `figure` for display + figure.show = lambda *a: display( + figure, metadata=_fetch_figure_metadata(figure)) + + # If matplotlib was manually set to non-interactive mode, this function + # should be a no-op (otherwise we'll generate duplicate plots, since a user + # who set ioff() manually expects to make separate draw/show calls). + if not matplotlib.is_interactive(): + return manager + + # ensure current figure will be drawn, and each subsequent call + # of draw_if_interactive() moves the active figure to ensure it is + # drawn last + try: + show._to_draw.remove(figure) + except ValueError: + # ensure it only appears in the draw list once + pass + # Queue up the figure for drawing in next show() call + show._to_draw.append(figure) + show._draw_called = True + + return manager + + +def show(close=None, block=None): + """Show all figures as SVG/PNG payloads sent to the IPython clients. + + Parameters + ---------- + close : bool, optional + If true, a ``plt.close('all')`` call is automatically issued after + sending all the figures. If this is set, the figures will entirely + removed from the internal list of figures. + block : Not used. + The `block` parameter is a Matplotlib experimental parameter. + We accept it in the function signature for compatibility with other + backends. + """ + if close is None: + close = InlineBackend.instance().close_figures + try: + for figure_manager in Gcf.get_all_fig_managers(): + display( + figure_manager.canvas.figure, + metadata=_fetch_figure_metadata(figure_manager.canvas.figure) + ) + finally: + show._to_draw = [] + # only call close('all') if any to close + # close triggers gc.collect, which can be slow + if close and Gcf.get_all_fig_managers(): + matplotlib.pyplot.close('all') + + +# This flag will be reset by draw_if_interactive when called +show._draw_called = False +# list of figures to draw when flush_figures is called +show._to_draw = [] + + +def flush_figures(): + """Send all figures that changed + + This is meant to be called automatically and will call show() if, during + prior code execution, there had been any calls to draw_if_interactive. + + This function is meant to be used as a post_execute callback in IPython, + so user-caused errors are handled with showtraceback() instead of being + allowed to raise. If this function is not called from within IPython, + then these exceptions will raise. + """ + if not show._draw_called: + return + + try: + if InlineBackend.instance().close_figures: + # ignore the tracking, just draw and close all figures + try: + return show(True) + except Exception as e: + # safely show traceback if in IPython, else raise + ip = get_ipython() + if ip is None: + raise e + else: + ip.showtraceback() + return + + # exclude any figures that were closed: + active = set([fm.canvas.figure for fm in Gcf.get_all_fig_managers()]) + for fig in [fig for fig in show._to_draw if fig in active]: + try: + display(fig, metadata=_fetch_figure_metadata(fig)) + except Exception as e: + # safely show traceback if in IPython, else raise + ip = get_ipython() + if ip is None: + raise e + else: + ip.showtraceback() + return + finally: + # clear flags for next round + show._to_draw = [] + show._draw_called = False + + +# Changes to matplotlib in version 1.2 requires a mpl backend to supply a default +# figurecanvas. 
This is set here to a Agg canvas +# See https://github.com/matplotlib/matplotlib/pull/1125 +FigureCanvas = FigureCanvasAgg + + +def configure_inline_support(shell, backend): + """Configure an IPython shell object for matplotlib use. + + Parameters + ---------- + shell : InteractiveShell instance + + backend : matplotlib backend + """ + # If using our svg payload backend, register the post-execution + # function that will pick up the results for display. This can only be + # done with access to the real shell object. + + cfg = InlineBackend.instance(parent=shell) + cfg.shell = shell + if cfg not in shell.configurables: + shell.configurables.append(cfg) + + if backend == 'module://matplotlib_inline.backend_inline': + shell.events.register('post_execute', flush_figures) + + # Save rcParams that will be overwrittern + shell._saved_rcParams = {} + for k in cfg.rc: + shell._saved_rcParams[k] = matplotlib.rcParams[k] + # load inline_rc + matplotlib.rcParams.update(cfg.rc) + new_backend_name = "inline" + else: + try: + shell.events.unregister('post_execute', flush_figures) + except ValueError: + pass + if hasattr(shell, '_saved_rcParams'): + matplotlib.rcParams.update(shell._saved_rcParams) + del shell._saved_rcParams + new_backend_name = "other" + + # only enable the formats once -> don't change the enabled formats (which the user may + # has changed) when getting another "%matplotlib inline" call. + # See https://github.com/ipython/ipykernel/issues/29 + cur_backend = getattr(configure_inline_support, "current_backend", "unset") + if new_backend_name != cur_backend: + # Setup the default figure format + select_figure_formats(shell, cfg.figure_formats, **cfg.print_figure_kwargs) + configure_inline_support.current_backend = new_backend_name + + +def _enable_matplotlib_integration(): + """Enable extra IPython matplotlib integration when we are loaded as the matplotlib backend.""" + from matplotlib import get_backend + ip = get_ipython() + backend = get_backend() + if ip and backend == 'module://%s' % __name__: + from IPython.core.pylabtools import activate_matplotlib + try: + activate_matplotlib(backend) + configure_inline_support(ip, backend) + except (ImportError, AttributeError): + # bugs may cause a circular import on Python 2 + def configure_once(*args): + activate_matplotlib(backend) + configure_inline_support(ip, backend) + ip.events.unregister('post_run_cell', configure_once) + ip.events.register('post_run_cell', configure_once) + + +_enable_matplotlib_integration() + + +def _fetch_figure_metadata(fig): + """Get some metadata to help with displaying a figure.""" + # determine if a background is needed for legibility + if _is_transparent(fig.get_facecolor()): + # the background is transparent + ticksLight = _is_light([label.get_color() + for axes in fig.axes + for axis in (axes.xaxis, axes.yaxis) + for label in axis.get_ticklabels()]) + if ticksLight.size and (ticksLight == ticksLight[0]).all(): + # there are one or more tick labels, all with the same lightness + return {'needs_background': 'dark' if ticksLight[0] else 'light'} + + return None + + +def _is_light(color): + """Determines if a color (or each of a sequence of colors) is light (as + opposed to dark). 
Based on ITU BT.601 luminance formula (see + https://stackoverflow.com/a/596241).""" + rgbaArr = colors.to_rgba_array(color) + return rgbaArr[:, :3].dot((.299, .587, .114)) > .5 + + +def _is_transparent(color): + """Determine transparency from alpha.""" + rgba = colors.to_rgba(color) + return rgba[3] < .5 + + +def set_matplotlib_formats(*formats, **kwargs): + """Select figure formats for the inline backend. Optionally pass quality for JPEG. + + For example, this enables PNG and JPEG output with a JPEG quality of 90%:: + + In [1]: set_matplotlib_formats('png', 'jpeg', quality=90) + + To set this in your config files use the following:: + + c.InlineBackend.figure_formats = {'png', 'jpeg'} + c.InlineBackend.print_figure_kwargs.update({'quality' : 90}) + + Parameters + ---------- + *formats : strs + One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. + **kwargs + Keyword args will be relayed to ``figure.canvas.print_figure``. + """ + # build kwargs, starting with InlineBackend config + cfg = InlineBackend.instance() + kw = {} + kw.update(cfg.print_figure_kwargs) + kw.update(**kwargs) + shell = InteractiveShell.instance() + select_figure_formats(shell, formats, **kw) + + +def set_matplotlib_close(close=True): + """Set whether the inline backend closes all figures automatically or not. + + By default, the inline backend used in the IPython Notebook will close all + matplotlib figures automatically after each cell is run. This means that + plots in different cells won't interfere. Sometimes, you may want to make + a plot in one cell and then refine it in later cells. This can be accomplished + by:: + + In [1]: set_matplotlib_close(False) + + To set this in your config files use the following:: + + c.InlineBackend.close_figures = False + + Parameters + ---------- + close : bool + Should all matplotlib figures be automatically closed after each cell is + run? + """ + cfg = InlineBackend.instance() + cfg.close_figures = close diff --git a/myenv/lib/python3.10/site-packages/matplotlib_inline/config.py b/myenv/lib/python3.10/site-packages/matplotlib_inline/config.py new file mode 100644 index 000000000..8718babad --- /dev/null +++ b/myenv/lib/python3.10/site-packages/matplotlib_inline/config.py @@ -0,0 +1,100 @@ +"""Configurable for configuring the IPython inline backend + +This module does not import anything from matplotlib. +""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the BSD 3-Clause License. + +from traitlets.config.configurable import SingletonConfigurable +from traitlets import ( + Dict, Instance, Set, Bool, TraitError, Unicode +) + + +# Configurable for inline backend options +def pil_available(): + """Test if PIL/Pillow is available""" + out = False + try: + from PIL import Image # noqa + out = True + except ImportError: + pass + return out + + +# Inherit from InlineBackendConfig for deprecation purposes +class InlineBackendConfig(SingletonConfigurable): + pass + + +class InlineBackend(InlineBackendConfig): + """An object to store configuration of the inline backend.""" + + # While we are deprecating overriding matplotlib defaults out of the + # box, this structure should remain here (empty) for API compatibility + # and the use of other tools that may need it. Specifically Spyder takes + # advantage of it. + # See https://github.com/ipython/ipython/issues/10383 for details. + rc = Dict( + {}, + help="""Dict to manage matplotlib configuration defaults in the inline + backend. 
As of v0.1.4 IPython/Jupyter do not override defaults out of + the box, but third-party tools may use it to manage rc data. To change + personal defaults for matplotlib, use matplotlib's configuration + tools, or customize this class in your `ipython_config.py` file for + IPython/Jupyter-specific usage.""").tag(config=True) + + figure_formats = Set( + {'png'}, + help="""A set of figure formats to enable: 'png', + 'retina', 'jpeg', 'svg', 'pdf'.""").tag(config=True) + + def _update_figure_formatters(self): + if self.shell is not None: + from IPython.core.pylabtools import select_figure_formats + select_figure_formats(self.shell, self.figure_formats, **self.print_figure_kwargs) + + def _figure_formats_changed(self, name, old, new): + if 'jpg' in new or 'jpeg' in new: + if not pil_available(): + raise TraitError("Requires PIL/Pillow for JPG figures") + self._update_figure_formatters() + + figure_format = Unicode(help="""The figure format to enable (deprecated + use `figure_formats` instead)""").tag(config=True) + + def _figure_format_changed(self, name, old, new): + if new: + self.figure_formats = {new} + + print_figure_kwargs = Dict( + {'bbox_inches': 'tight'}, + help="""Extra kwargs to be passed to fig.canvas.print_figure. + + Logical examples include: bbox_inches, quality (for jpeg figures), etc. + """ + ).tag(config=True) + _print_figure_kwargs_changed = _update_figure_formatters + + close_figures = Bool( + True, + help="""Close all figures at the end of each cell. + + When True, ensures that each cell starts with no active figures, but it + also means that one must keep track of references in order to edit or + redraw figures in subsequent cells. This mode is ideal for the notebook, + where residual plots from other cells might be surprising. + + When False, one must call figure() to create new figures. This means + that gcf() and getfigs() can reference figures created in other cells, + and the active figure can continue to be edited with pylab/pyplot + methods that reference the current active figure. This mode facilitates + iterative editing of figures, and behaves most consistently with + other matplotlib backends, but figure barriers between cells must + be explicit. 
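+
+        For example, to keep figures alive across cells (the same setting
+        shown in ``set_matplotlib_close``), put this in ``ipython_config.py``::
+
+            c.InlineBackend.close_figures = False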
+ """).tag(config=True) + + shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', + allow_none=True) diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/AUTHORS.txt b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/AUTHORS.txt new file mode 100644 index 000000000..9737530ba --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/AUTHORS.txt @@ -0,0 +1,58 @@ +Main Authors +============ + +David Halter (@davidhalter) + +Code Contributors +================= +Alisdair Robertson (@robodair) +Bryan Forbes (@bryanforbes) + + +Code Contributors (to Jedi and therefore possibly to this library) +================================================================== + +Takafumi Arakaki (@tkf) +Danilo Bargen (@dbrgn) +Laurens Van Houtven (@lvh) <_@lvh.cc> +Aldo Stracquadanio (@Astrac) +Jean-Louis Fuchs (@ganwell) +tek (@tek) +Yasha Borevich (@jjay) +Aaron Griffin +andviro (@andviro) +Mike Gilbert (@floppym) +Aaron Meurer (@asmeurer) +Lubos Trilety +Akinori Hattori (@hattya) +srusskih (@srusskih) +Steven Silvester (@blink1073) +Colin Duquesnoy (@ColinDuquesnoy) +Jorgen Schaefer (@jorgenschaefer) +Fredrik Bergroth (@fbergroth) +Mathias Fußenegger (@mfussenegger) +Syohei Yoshida (@syohex) +ppalucky (@ppalucky) +immerrr (@immerrr) immerrr@gmail.com +Albertas Agejevas (@alga) +Savor d'Isavano (@KenetJervet) +Phillip Berndt (@phillipberndt) +Ian Lee (@IanLee1521) +Farkhad Khatamov (@hatamov) +Kevin Kelley (@kelleyk) +Sid Shanker (@squidarth) +Reinoud Elhorst (@reinhrst) +Guido van Rossum (@gvanrossum) +Dmytro Sadovnychyi (@sadovnychyi) +Cristi Burcă (@scribu) +bstaint (@bstaint) +Mathias Rav (@Mortal) +Daniel Fiterman (@dfit99) +Simon Ruggier (@sruggier) +Élie Gouzien (@ElieGouzien) +Tim Gates (@timgates42) +Batuhan Taskaya (@isidentical) +Jocelyn Boullier (@Kazy) + + +Note: (@user) means a github user name. diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/INSTALLER b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/LICENSE.txt b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/LICENSE.txt new file mode 100644 index 000000000..08c41db01 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/LICENSE.txt @@ -0,0 +1,86 @@ +All contributions towards parso are MIT licensed. + +Some Python files have been taken from the standard library and are therefore +PSF licensed. Modifications on these files are dual licensed (both MIT and +PSF). These files are: + +- parso/pgen2/* +- parso/tokenize.py +- parso/token.py +- test/test_pgen2.py + +Also some test files under test/normalizer_issue_files have been copied from +https://github.com/PyCQA/pycodestyle (Expat License == MIT License). 
+ +------------------------------------------------------------------------------- +The MIT License (MIT) + +Copyright (c) <2013-2017> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +------------------------------------------------------------------------------- + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/METADATA b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/METADATA
new file mode 100644
index 000000000..331fef3a4
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/METADATA
@@ -0,0 +1,281 @@
+Metadata-Version: 2.1
+Name: parso
+Version: 0.8.3
+Summary: A Python Parser
+Home-page: https://github.com/davidhalter/parso
+Author: David Halter
+Author-email: davidhalter88@gmail.com
+Maintainer: David Halter
+Maintainer-email: davidhalter88@gmail.com
+License: MIT
+Keywords: python parser parsing
+Platform: any
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Plugins
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.6
+Provides-Extra: qa
+Requires-Dist: flake8 (==3.8.3) ; extra == 'qa'
+Requires-Dist: mypy (==0.782) ; extra == 'qa'
+Provides-Extra: testing
+Requires-Dist: docopt ; extra == 'testing'
+Requires-Dist: pytest (<6.0.0) ; extra == 'testing'
+
+###################################################################
+parso - A Python Parser
+###################################################################
+
+
+.. image:: https://github.com/davidhalter/parso/workflows/Build/badge.svg?branch=master
+    :target: https://github.com/davidhalter/parso/actions
+    :alt: GitHub Actions build status
+
+.. image:: https://coveralls.io/repos/github/davidhalter/parso/badge.svg?branch=master
+    :target: https://coveralls.io/github/davidhalter/parso?branch=master
+    :alt: Coverage Status
+
+.. image:: https://pepy.tech/badge/parso
+    :target: https://pepy.tech/project/parso
+    :alt: PyPI Downloads
+
+.. image:: https://raw.githubusercontent.com/davidhalter/parso/master/docs/_static/logo_characters.png
+
+Parso is a Python parser that supports error recovery and round-trip parsing
+for different Python versions (in multiple Python versions). Parso is also able
+to list multiple syntax errors in your python file.
+
+Parso has been battle-tested by jedi_. It was pulled out of jedi to be useful
+for other projects as well.
+
+Parso consists of a small API to parse Python and analyse the syntax tree.
+
+A simple example:
+
+.. code-block:: python
+
+    >>> import parso
+    >>> module = parso.parse('hello + 1', version="3.9")
+    >>> expr = module.children[0]
+    >>> expr
+    PythonNode(arith_expr, [<Name: hello@1,0>, <Operator: +>, <Number: 1>])
+    >>> print(expr.get_code())
+    hello + 1
+    >>> name = expr.children[0]
+    >>> name
+    <Name: hello@1,0>
+    >>> name.end_pos
+    (1, 5)
+    >>> expr.end_pos
+    (1, 9)
+
+To list multiple issues:
code-block:: python + + >>> grammar = parso.load_grammar() + >>> module = grammar.parse('foo +\nbar\ncontinue') + >>> error1, error2 = grammar.iter_errors(module) + >>> error1.message + 'SyntaxError: invalid syntax' + >>> error2.message + "SyntaxError: 'continue' not properly in loop" + +Resources +========= + +- `Testing `_ +- `PyPI `_ +- `Docs `_ +- Uses `semantic versioning `_ + +Installation +============ + + pip install parso + +Future +====== + +- There will be better support for refactoring and comments. Stay tuned. +- There's a WIP PEP8 validator. It's however not in a good shape, yet. + +Known Issues +============ + +- `async`/`await` are already used as keywords in Python3.6. +- `from __future__ import print_function` is not ignored. + + +Acknowledgements +================ + +- Guido van Rossum (@gvanrossum) for creating the parser generator pgen2 + (originally used in lib2to3). +- `Salome Schneider `_ + for the extremely awesome parso logo. + + +.. _jedi: https://github.com/davidhalter/jedi + + +.. :changelog: + +Changelog +--------- + +Unreleased +++++++++++ + +0.8.3 (2021-11-30) +++++++++++++++++++ + +- Add basic support for Python 3.11 and 3.12 + +0.8.2 (2021-03-30) +++++++++++++++++++ + +- Various small bugfixes + +0.8.1 (2020-12-10) +++++++++++++++++++ + +- Various small bugfixes + +0.8.0 (2020-08-05) +++++++++++++++++++ + +- Dropped Support for Python 2.7, 3.4, 3.5 +- It's possible to use ``pathlib.Path`` objects now in the API +- The stubs are gone, we are now using annotations +- ``namedexpr_test`` nodes are now a proper class called ``NamedExpr`` +- A lot of smaller refactorings + +0.7.1 (2020-07-24) +++++++++++++++++++ + +- Fixed a couple of smaller bugs (mostly syntax error detection in + ``Grammar.iter_errors``) + +This is going to be the last release that supports Python 2.7, 3.4 and 3.5. + +0.7.0 (2020-04-13) +++++++++++++++++++ + +- Fix a lot of annoying bugs in the diff parser. The fuzzer did not find + issues anymore even after running it for more than 24 hours (500k tests). +- Small grammar change: suites can now contain newlines even after a newline. + This should really not matter if you don't use error recovery. It allows for + nicer error recovery. 
+ +0.6.2 (2020-02-27) +++++++++++++++++++ + +- Bugfixes +- Add Grammar.refactor (might still be subject to change until 0.7.0) + +0.6.1 (2020-02-03) +++++++++++++++++++ + +- Add ``parso.normalizer.Issue.end_pos`` to make it possible to know where an + issue ends + +0.6.0 (2020-01-26) +++++++++++++++++++ + +- Dropped Python 2.6/Python 3.3 support +- del_stmt names are now considered as a definition + (for ``name.is_definition()``) +- Bugfixes + +0.5.2 (2019-12-15) +++++++++++++++++++ + +- Add include_setitem to get_definition/is_definition and get_defined_names (#66) +- Fix named expression error listing (#89, #90) +- Fix some f-string tokenizer issues (#93) + +0.5.1 (2019-07-13) +++++++++++++++++++ + +- Fix: Some unicode identifiers were not correctly tokenized +- Fix: Line continuations in f-strings are now working + +0.5.0 (2019-06-20) +++++++++++++++++++ + +- **Breaking Change** comp_for is now called sync_comp_for for all Python + versions to be compatible with the Python 3.8 Grammar +- Added .pyi stubs for a lot of the parso API +- Small FileIO changes + +0.4.0 (2019-04-05) +++++++++++++++++++ + +- Python 3.8 support +- FileIO support, it's now possible to use abstract file IO, support is alpha + +0.3.4 (2019-02-13) ++++++++++++++++++++ + +- Fix an f-string tokenizer error + +0.3.3 (2019-02-06) ++++++++++++++++++++ + +- Fix async errors in the diff parser +- A fix in iter_errors +- This is a very small bugfix release + +0.3.2 (2019-01-24) ++++++++++++++++++++ + +- 20+ bugfixes in the diff parser and 3 in the tokenizer +- A fuzzer for the diff parser, to give confidence that the diff parser is in a + good shape. +- Some bugfixes for f-string + +0.3.1 (2018-07-09) ++++++++++++++++++++ + +- Bugfixes in the diff parser and keyword-only arguments + +0.3.0 (2018-06-30) ++++++++++++++++++++ + +- Rewrote the pgen2 parser generator. + +0.2.1 (2018-05-21) ++++++++++++++++++++ + +- A bugfix for the diff parser. +- Grammar files can now be loaded from a specific path. + +0.2.0 (2018-04-15) ++++++++++++++++++++ + +- f-strings are now parsed as a part of the normal Python grammar. This makes + it way easier to deal with them. + +0.1.1 (2017-11-05) ++++++++++++++++++++ + +- Fixed a few bugs in the caching layer +- Added support for Python 3.7 + +0.1.0 (2017-09-04) ++++++++++++++++++++ + +- Pulling the library out of Jedi. Some APIs will definitely change. 
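A quick, self-contained sketch of the two features the README above advertises (error recovery and round-trip parsing), using only the documented ``parso.parse``/``load_grammar`` API; the broken snippet is an arbitrary example input, not part of the package:

.. code-block:: python

    import parso

    # Error recovery: syntactically broken code still yields a tree.
    module = parso.parse('def broken(:\n    pass', version='3.10')

    # Round-trip parsing: get_code() reproduces the input exactly.
    assert module.get_code() == 'def broken(:\n    pass'

    # iter_errors() lists every syntax error that was recovered from.
    grammar = parso.load_grammar(version='3.10')
    for issue in grammar.iter_errors(grammar.parse('continue')):
        print(issue.start_pos, issue.message)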
+ + diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/RECORD b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/RECORD new file mode 100644 index 000000000..8684d804f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/RECORD @@ -0,0 +1,58 @@ +parso-0.8.3.dist-info/AUTHORS.txt,sha256=SDCgu8hXlBBcjPPyUT-SKW20_IM2MxW-95hKFaRIqyI,2029 +parso-0.8.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +parso-0.8.3.dist-info/LICENSE.txt,sha256=-meXMHN1PRdiTK-GhNXugW1wyJ2RLFvKfKDwjnsVDts,4176 +parso-0.8.3.dist-info/METADATA,sha256=pYazDX1eIHikB0A_h_uHmj4Plg7Aj1ZVaErjL5ljg88,7546 +parso-0.8.3.dist-info/RECORD,, +parso-0.8.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +parso-0.8.3.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +parso-0.8.3.dist-info/top_level.txt,sha256=GOOKQCPcnr0_7IRArxyI0CX5LLu4WLlzIRAVWS-vJ4s,6 +parso/__init__.py,sha256=eatQ-us9dJoFPpMfoSsmGXzoZSnMDxuOTEIY1PC3LSc,1607 +parso/__pycache__/__init__.cpython-310.pyc,, +parso/__pycache__/_compatibility.cpython-310.pyc,, +parso/__pycache__/cache.cpython-310.pyc,, +parso/__pycache__/file_io.cpython-310.pyc,, +parso/__pycache__/grammar.cpython-310.pyc,, +parso/__pycache__/normalizer.cpython-310.pyc,, +parso/__pycache__/parser.cpython-310.pyc,, +parso/__pycache__/tree.cpython-310.pyc,, +parso/__pycache__/utils.cpython-310.pyc,, +parso/_compatibility.py,sha256=y-fATJ1dyaoVry175CMDBA088IGTxChkCKD2dUAnsrU,70 +parso/cache.py,sha256=KyQBZdTuBXhDjLmwTSLOgyQoq4NLt_wNr1882DTkOW4,8452 +parso/file_io.py,sha256=2SbXQuMpjAaQ0OYvxZXOgl-oU945-CrIei3eEamWWmk,1023 +parso/grammar.py,sha256=g98usXJOjb2vvEDyoh1MWtWbiOX2XP0wiLmayt_Mxug,10483 +parso/normalizer.py,sha256=geYG9UZQ6ZpafTc_CiXQoBt8VImdBsiNw6_GJLeSGbg,5597 +parso/parser.py,sha256=qlIrRikSxAccfsC6B6Y9sPWyEhR0HIBaCbNveV1OcAE,7182 +parso/pgen2/__init__.py,sha256=kFfRZsSReM49V0YIJ_cG0_TMTew2t4IMbG95KO2BI8E,382 +parso/pgen2/__pycache__/__init__.cpython-310.pyc,, +parso/pgen2/__pycache__/generator.cpython-310.pyc,, +parso/pgen2/__pycache__/grammar_parser.cpython-310.pyc,, +parso/pgen2/generator.py,sha256=5kS5eBjz9sDdavUMwNyw6FtDPhv-PVXiuyDUNC8KiLQ,14570 +parso/pgen2/grammar_parser.py,sha256=knJh3a40_JxUkb0HePG78ZZoqjpPNk3uZwNOz2EkkV4,5515 +parso/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +parso/python/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +parso/python/__pycache__/__init__.cpython-310.pyc,, +parso/python/__pycache__/diff.cpython-310.pyc,, +parso/python/__pycache__/errors.cpython-310.pyc,, +parso/python/__pycache__/parser.cpython-310.pyc,, +parso/python/__pycache__/pep8.cpython-310.pyc,, +parso/python/__pycache__/prefix.cpython-310.pyc,, +parso/python/__pycache__/token.cpython-310.pyc,, +parso/python/__pycache__/tokenize.cpython-310.pyc,, +parso/python/__pycache__/tree.cpython-310.pyc,, +parso/python/diff.py,sha256=jyrqWRKklyPPezZRKRHxoKbhkywiCUoGH_K1HcRSyMA,34206 +parso/python/errors.py,sha256=0nTFd692jKvxHpwklkbz1lRByPFTj_f1xmtVsk0ji2E,47955 +parso/python/grammar310.txt,sha256=QwXaHqJcJ_zgi9FAAbdv1U_kKgcku9UWjHZoClbtpb4,7511 +parso/python/grammar311.txt,sha256=QwXaHqJcJ_zgi9FAAbdv1U_kKgcku9UWjHZoClbtpb4,7511 +parso/python/grammar312.txt,sha256=QwXaHqJcJ_zgi9FAAbdv1U_kKgcku9UWjHZoClbtpb4,7511 +parso/python/grammar36.txt,sha256=ezjXEeLpG9BBMrN0rbM3Z77mcn0XESxSlAaZEy2er-k,6948 +parso/python/grammar37.txt,sha256=Ke73_sTcivtBt2rkJaoNYiXa_zLenhCr96HOVPpZB_E,6804 
+parso/python/grammar38.txt,sha256=OhPReVYqhsX2RWyVryca3RUGcvLb-R1dcbwdbgPIvBI,7591
+parso/python/grammar39.txt,sha256=cVrVbF9Pg5UJLFi2tvLetPkG-BOAkpqDa9hqslNjSHU,7499
+parso/python/parser.py,sha256=5OMU32ybPF6kcKUdbcfNNkDOK8hJy0B7fqi6b-Gfwqw,8108
+parso/python/pep8.py,sha256=tsuRslXZvfio8LTBIAbfExjBIT1f3Xjx3igt28fm3G4,33779
+parso/python/prefix.py,sha256=BM93VenBA1Vs-qk2AJSLBMJNn5BDbyVZLIZ5ScT4FIU,2743
+parso/python/token.py,sha256=0dzmQf6L59bEJb9MXYbrDtq3bAHNdTuk-PmOhox81G4,909
+parso/python/tokenize.py,sha256=kqmG8SEdkbLG3Gf6gQyeQZerp4yhzzXX14FCYOZJ5mI,25795
+parso/python/tree.py,sha256=RasTJEVX8G97JgvbAQnClnOm-ZyTmsTADVlHOLPKRBQ,37187
+parso/tree.py,sha256=deZ68uAq0jodEeumJpBYWXWciIXcBYfpsLpe1f1WLO8,16153
+parso/utils.py,sha256=qW8kJuw9pyK8WaIi37FX44kNjAaIgu15EGv7V_R4PmE,6620
diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/REQUESTED b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/REQUESTED
new file mode 100644
index 000000000..e69de29bb
diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/WHEEL b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/WHEEL
new file mode 100644
index 000000000..ef99c6cf3
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/top_level.txt b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/top_level.txt
new file mode 100644
index 000000000..0e2334404
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso-0.8.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+parso
diff --git a/myenv/lib/python3.10/site-packages/parso/__init__.py b/myenv/lib/python3.10/site-packages/parso/__init__.py
new file mode 100644
index 000000000..0cceabedc
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/__init__.py
@@ -0,0 +1,58 @@
+r"""
+Parso is a Python parser that supports error recovery and round-trip parsing
+for different Python versions (in multiple Python versions). Parso is also able
+to list multiple syntax errors in your python file.
+
+Parso has been battle-tested by jedi_. It was pulled out of jedi to be useful
+for other projects as well.
+
+Parso consists of a small API to parse Python and analyse the syntax tree.
+
+.. _jedi: https://github.com/davidhalter/jedi
+
+A simple example:
+
+>>> import parso
+>>> module = parso.parse('hello + 1', version="3.9")
+>>> expr = module.children[0]
+>>> expr
+PythonNode(arith_expr, [<Name: hello@1,0>, <Operator: +>, <Number: 1>])
+>>> print(expr.get_code())
+hello + 1
+>>> name = expr.children[0]
+>>> name
+<Name: hello@1,0>
+>>> name.end_pos
+(1, 5)
+>>> expr.end_pos
+(1, 9)
+
+To list multiple issues:
+
+>>> grammar = parso.load_grammar()
+>>> module = grammar.parse('foo +\nbar\ncontinue')
+>>> error1, error2 = grammar.iter_errors(module)
+>>> error1.message
+'SyntaxError: invalid syntax'
+>>> error2.message
+"SyntaxError: 'continue' not properly in loop"
+"""
+
+from parso.parser import ParserSyntaxError
+from parso.grammar import Grammar, load_grammar
+from parso.utils import split_lines, python_bytes_to_unicode
+
+
+__version__ = '0.8.3'
+
+
+def parse(code=None, **kwargs):
+    """
+    A utility function to avoid loading grammars.
+    Params are documented in :py:meth:`parso.Grammar.parse`.
+
+    :param str version: The version used by :py:func:`parso.load_grammar`.
+ """ + version = kwargs.pop('version', None) + grammar = load_grammar(version=version) + return grammar.parse(code, **kwargs) diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..bdf482e2d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/_compatibility.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/_compatibility.cpython-310.pyc new file mode 100644 index 000000000..96e201c75 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/_compatibility.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/cache.cpython-310.pyc new file mode 100644 index 000000000..5291f678f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/file_io.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/file_io.cpython-310.pyc new file mode 100644 index 000000000..0aa32705f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/file_io.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/grammar.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/grammar.cpython-310.pyc new file mode 100644 index 000000000..ba31e0315 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/grammar.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/normalizer.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/normalizer.cpython-310.pyc new file mode 100644 index 000000000..7378e8233 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/normalizer.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/parser.cpython-310.pyc new file mode 100644 index 000000000..c97136320 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/parser.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/tree.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/tree.cpython-310.pyc new file mode 100644 index 000000000..fe618888b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/tree.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/__pycache__/utils.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/__pycache__/utils.cpython-310.pyc new file mode 100644 index 000000000..42c0cca52 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/__pycache__/utils.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/_compatibility.py b/myenv/lib/python3.10/site-packages/parso/_compatibility.py new file mode 100644 index 000000000..58b186fc4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/_compatibility.py @@ -0,0 +1,3 @@ +import platform + +is_pypy = platform.python_implementation() == 'PyPy' diff --git a/myenv/lib/python3.10/site-packages/parso/cache.py 
b/myenv/lib/python3.10/site-packages/parso/cache.py
new file mode 100644
index 000000000..5592a9fdd
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/cache.py
@@ -0,0 +1,275 @@
+import time
+import os
+import sys
+import hashlib
+import gc
+import shutil
+import platform
+import logging
+import warnings
+import pickle
+from pathlib import Path
+from typing import Dict, Any
+
+LOG = logging.getLogger(__name__)
+
+_CACHED_FILE_MINIMUM_SURVIVAL = 60 * 10  # 10 minutes
+"""
+Cached files should survive at least a few minutes.
+"""
+
+_CACHED_FILE_MAXIMUM_SURVIVAL = 60 * 60 * 24 * 30
+"""
+Maximum time for a cached file to survive if it is not
+accessed within.
+"""
+
+_CACHED_SIZE_TRIGGER = 600
+"""
+This setting limits the amount of cached files. It's basically a way to start
+garbage collection.
+
+The reasoning for this limit being as big as it is, is the following:
+
+Numpy, Pandas, Matplotlib and Tensorflow together use about 500 files. This
+makes Jedi use ~500mb of memory. Since we might want a bit more than those few
+libraries, we just increase it a bit.
+"""
+
+_PICKLE_VERSION = 33
+"""
+Version number (integer) for file system cache.
+
+Increment this number when there are any incompatible changes in
+the parser tree classes. For example, the following changes
+are regarded as incompatible.
+
+- A class name is changed.
+- A class is moved to another module.
+- A __slot__ of a class is changed.
+"""
+
+_VERSION_TAG = '%s-%s%s-%s' % (
+    platform.python_implementation(),
+    sys.version_info[0],
+    sys.version_info[1],
+    _PICKLE_VERSION
+)
+"""
+Short name to distinguish Python implementations and versions.
+
+It's a bit similar to `sys.implementation.cache_tag`.
+See: http://docs.python.org/3/library/sys.html#sys.implementation
+"""
+
+
+def _get_default_cache_path():
+    if platform.system().lower() == 'windows':
+        dir_ = Path(os.getenv('LOCALAPPDATA') or '~', 'Parso', 'Parso')
+    elif platform.system().lower() == 'darwin':
+        dir_ = Path('~', 'Library', 'Caches', 'Parso')
+    else:
+        dir_ = Path(os.getenv('XDG_CACHE_HOME') or '~/.cache', 'parso')
+    return dir_.expanduser()
+
+
+_default_cache_path = _get_default_cache_path()
+"""
+The path where the cache is stored.

+On Linux, this defaults to ``~/.cache/parso/``, on OS X to
+``~/Library/Caches/Parso/`` and on Windows to ``%LOCALAPPDATA%\\Parso\\Parso\\``.
+On Linux, if the environment variable ``$XDG_CACHE_HOME`` is set,
+``$XDG_CACHE_HOME/parso`` is used instead of the default one.
+"""
+
+_CACHE_CLEAR_THRESHOLD = 60 * 60 * 24
+
+
+def _get_cache_clear_lock_path(cache_path=None):
+    """
+    The path where the cache lock is stored.
+
+    The cache lock prevents continuous cache clearing and only allows garbage
+    collection once a day (configurable via _CACHE_CLEAR_THRESHOLD).
+    """
+    cache_path = cache_path or _default_cache_path
+    return cache_path.joinpath("PARSO-CACHE-LOCK")
+
+
+parser_cache: Dict[str, Any] = {}
+
+
+class _NodeCacheItem:
+    def __init__(self, node, lines, change_time=None):
+        self.node = node
+        self.lines = lines
+        if change_time is None:
+            change_time = time.time()
+        self.change_time = change_time
+        self.last_used = change_time
+
+
+def load_module(hashed_grammar, file_io, cache_path=None):
+    """
+    Returns a module or None, if it fails.
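+    Checks the in-RAM ``parser_cache`` first and falls back to the pickled
+    cache on the file system.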
+ """ + p_time = file_io.get_last_modified() + if p_time is None: + return None + + try: + module_cache_item = parser_cache[hashed_grammar][file_io.path] + if p_time <= module_cache_item.change_time: + module_cache_item.last_used = time.time() + return module_cache_item.node + except KeyError: + return _load_from_file_system( + hashed_grammar, + file_io.path, + p_time, + cache_path=cache_path + ) + + +def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None): + cache_path = _get_hashed_path(hashed_grammar, path, cache_path=cache_path) + try: + if p_time > os.path.getmtime(cache_path): + # Cache is outdated + return None + + with open(cache_path, 'rb') as f: + gc.disable() + try: + module_cache_item = pickle.load(f) + finally: + gc.enable() + except FileNotFoundError: + return None + else: + _set_cache_item(hashed_grammar, path, module_cache_item) + LOG.debug('pickle loaded: %s', path) + return module_cache_item.node + + +def _set_cache_item(hashed_grammar, path, module_cache_item): + if sum(len(v) for v in parser_cache.values()) >= _CACHED_SIZE_TRIGGER: + # Garbage collection of old cache files. + # We are basically throwing everything away that hasn't been accessed + # in 10 minutes. + cutoff_time = time.time() - _CACHED_FILE_MINIMUM_SURVIVAL + for key, path_to_item_map in parser_cache.items(): + parser_cache[key] = { + path: node_item + for path, node_item in path_to_item_map.items() + if node_item.last_used > cutoff_time + } + + parser_cache.setdefault(hashed_grammar, {})[path] = module_cache_item + + +def try_to_save_module(hashed_grammar, file_io, module, lines, pickling=True, cache_path=None): + path = file_io.path + try: + p_time = None if path is None else file_io.get_last_modified() + except OSError: + p_time = None + pickling = False + + item = _NodeCacheItem(module, lines, p_time) + _set_cache_item(hashed_grammar, path, item) + if pickling and path is not None: + try: + _save_to_file_system(hashed_grammar, path, item, cache_path=cache_path) + except PermissionError: + # It's not really a big issue if the cache cannot be saved to the + # file system. It's still in RAM in that case. However we should + # still warn the user that this is happening. + warnings.warn( + 'Tried to save a file to %s, but got permission denied.' % path, + Warning + ) + else: + _remove_cache_and_update_lock(cache_path=cache_path) + + +def _save_to_file_system(hashed_grammar, path, item, cache_path=None): + with open(_get_hashed_path(hashed_grammar, path, cache_path=cache_path), 'wb') as f: + pickle.dump(item, f, pickle.HIGHEST_PROTOCOL) + + +def clear_cache(cache_path=None): + if cache_path is None: + cache_path = _default_cache_path + shutil.rmtree(cache_path) + parser_cache.clear() + + +def clear_inactive_cache( + cache_path=None, + inactivity_threshold=_CACHED_FILE_MAXIMUM_SURVIVAL, +): + if cache_path is None: + cache_path = _default_cache_path + if not cache_path.exists(): + return False + for dirname in os.listdir(cache_path): + version_path = cache_path.joinpath(dirname) + if not version_path.is_dir(): + continue + for file in os.scandir(version_path): + if file.stat().st_atime + _CACHED_FILE_MAXIMUM_SURVIVAL <= time.time(): + try: + os.remove(file.path) + except OSError: # silently ignore all failures + continue + else: + return True + + +def _touch(path): + try: + os.utime(path, None) + except FileNotFoundError: + try: + file = open(path, 'a') + file.close() + except (OSError, IOError): # TODO Maybe log this? 
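+            # (Creating the lock file also failed; report failure so the
+            # caller can skip this round of cache clearing.)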
+ return False + return True + + +def _remove_cache_and_update_lock(cache_path=None): + lock_path = _get_cache_clear_lock_path(cache_path=cache_path) + try: + clear_lock_time = os.path.getmtime(lock_path) + except FileNotFoundError: + clear_lock_time = None + if ( + clear_lock_time is None # first time + or clear_lock_time + _CACHE_CLEAR_THRESHOLD <= time.time() + ): + if not _touch(lock_path): + # First make sure that as few as possible other cleanup jobs also + # get started. There is still a race condition but it's probably + # not a big problem. + return False + + clear_inactive_cache(cache_path=cache_path) + + +def _get_hashed_path(hashed_grammar, path, cache_path=None): + directory = _get_cache_directory_path(cache_path=cache_path) + + file_hash = hashlib.sha256(str(path).encode("utf-8")).hexdigest() + return os.path.join(directory, '%s-%s.pkl' % (hashed_grammar, file_hash)) + + +def _get_cache_directory_path(cache_path=None): + if cache_path is None: + cache_path = _default_cache_path + directory = cache_path.joinpath(_VERSION_TAG) + if not directory.exists(): + os.makedirs(directory) + return directory diff --git a/myenv/lib/python3.10/site-packages/parso/file_io.py b/myenv/lib/python3.10/site-packages/parso/file_io.py new file mode 100644 index 000000000..568ce9d48 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/file_io.py @@ -0,0 +1,38 @@ +import os +from pathlib import Path +from typing import Union + + +class FileIO: + def __init__(self, path: Union[os.PathLike, str]): + if isinstance(path, str): + path = Path(path) + self.path = path + + def read(self): # Returns bytes/str + # We would like to read unicode here, but we cannot, because we are not + # sure if it is a valid unicode file. Therefore just read whatever is + # here. + with open(self.path, 'rb') as f: + return f.read() + + def get_last_modified(self): + """ + Returns float - timestamp or None, if path doesn't exist. + """ + try: + return os.path.getmtime(self.path) + except FileNotFoundError: + return None + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, self.path) + + +class KnownContentFileIO(FileIO): + def __init__(self, path, content): + super().__init__(path) + self._content = content + + def read(self): + return self._content diff --git a/myenv/lib/python3.10/site-packages/parso/grammar.py b/myenv/lib/python3.10/site-packages/parso/grammar.py new file mode 100644 index 000000000..43b7be940 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/grammar.py @@ -0,0 +1,264 @@ +import hashlib +import os +from typing import Generic, TypeVar, Union, Dict, Optional, Any +from pathlib import Path + +from parso._compatibility import is_pypy +from parso.pgen2 import generate_grammar +from parso.utils import split_lines, python_bytes_to_unicode, \ + PythonVersionInfo, parse_version_string +from parso.python.diff import DiffParser +from parso.python.tokenize import tokenize_lines, tokenize +from parso.python.token import PythonTokenTypes +from parso.cache import parser_cache, load_module, try_to_save_module +from parso.parser import BaseParser +from parso.python.parser import Parser as PythonParser +from parso.python.errors import ErrorFinderConfig +from parso.python import pep8 +from parso.file_io import FileIO, KnownContentFileIO +from parso.normalizer import RefactoringNormalizer, NormalizerConfig + +_loaded_grammars: Dict[str, 'Grammar'] = {} + +_NodeT = TypeVar("_NodeT") + + +class Grammar(Generic[_NodeT]): + """ + :py:func:`parso.load_grammar` returns instances of this class. 
+
+    Creating custom non-Python grammars by calling this is not supported, yet.
+
+    :param text: A BNF representation of your grammar.
+    """
+    _start_nonterminal: str
+    _error_normalizer_config: Optional[ErrorFinderConfig] = None
+    _token_namespace: Any = None
+    _default_normalizer_config: NormalizerConfig = pep8.PEP8NormalizerConfig()
+
+    def __init__(self, text: str, *, tokenizer, parser=BaseParser, diff_parser=None):
+        self._pgen_grammar = generate_grammar(
+            text,
+            token_namespace=self._get_token_namespace()
+        )
+        self._parser = parser
+        self._tokenizer = tokenizer
+        self._diff_parser = diff_parser
+        self._hashed = hashlib.sha256(text.encode("utf-8")).hexdigest()
+
+    def parse(self,
+              code: Union[str, bytes] = None,
+              *,
+              error_recovery=True,
+              path: Union[os.PathLike, str] = None,
+              start_symbol: str = None,
+              cache=False,
+              diff_cache=False,
+              cache_path: Union[os.PathLike, str] = None,
+              file_io: FileIO = None) -> _NodeT:
+        """
+        If you want to parse a Python file you want to start here, most likely.
+
+        If you need finer grained control over the parsed instance, there will be
+        other ways to access it.
+
+        :param str code: A unicode or bytes string. When it's not possible to
+            decode bytes to a string, raises a
+            :py:class:`UnicodeDecodeError`.
+        :param bool error_recovery: If enabled, any code will be returned. If
+            it is invalid, it will be returned as an error node. If disabled,
+            you will get a ParseError when encountering syntax errors in your
+            code.
+        :param str start_symbol: The grammar rule (nonterminal) that you want
+            to parse. Only allowed to be used when error_recovery is False.
+        :param str path: The path to the file you want to open. Only needed for caching.
+        :param bool cache: Keeps a copy of the parser tree in RAM and on disk
+            if a path is given. Returns the cached trees if the corresponding
+            files on disk have not changed. Note that this stores pickle files
+            on your file system (e.g. for Linux in ``~/.cache/parso/``).
+        :param bool diff_cache: Diffs the cached python module against the new
+            code and tries to parse only the parts that have changed. Returns
+            the same (changed) module that is found in cache. Using this option
+            requires you to not do anything anymore with the cached modules
+            under that path, because the contents of it might change. This
+            option is still somewhat experimental. If you want stability,
+            please don't use it.
+        :param str cache_path: If given, saves the parso cache in this
+            directory. If not given, defaults to the default cache places on
+            each platform.
+
+        :return: A subclass of :py:class:`parso.tree.NodeOrLeaf`. Typically a
+            :py:class:`parso.python.tree.Module`.
+ """ + if code is None and path is None and file_io is None: + raise TypeError("Please provide either code or a path.") + + if isinstance(path, str): + path = Path(path) + if isinstance(cache_path, str): + cache_path = Path(cache_path) + + if start_symbol is None: + start_symbol = self._start_nonterminal + + if error_recovery and start_symbol != 'file_input': + raise NotImplementedError("This is currently not implemented.") + + if file_io is None: + if code is None: + file_io = FileIO(path) # type: ignore + else: + file_io = KnownContentFileIO(path, code) + + if cache and file_io.path is not None: + module_node = load_module(self._hashed, file_io, cache_path=cache_path) + if module_node is not None: + return module_node # type: ignore + + if code is None: + code = file_io.read() + code = python_bytes_to_unicode(code) + + lines = split_lines(code, keepends=True) + if diff_cache: + if self._diff_parser is None: + raise TypeError("You have to define a diff parser to be able " + "to use this option.") + try: + module_cache_item = parser_cache[self._hashed][file_io.path] + except KeyError: + pass + else: + module_node = module_cache_item.node + old_lines = module_cache_item.lines + if old_lines == lines: + return module_node # type: ignore + + new_node = self._diff_parser( + self._pgen_grammar, self._tokenizer, module_node + ).update( + old_lines=old_lines, + new_lines=lines + ) + try_to_save_module(self._hashed, file_io, new_node, lines, + # Never pickle in pypy, it's slow as hell. + pickling=cache and not is_pypy, + cache_path=cache_path) + return new_node # type: ignore + + tokens = self._tokenizer(lines) + + p = self._parser( + self._pgen_grammar, + error_recovery=error_recovery, + start_nonterminal=start_symbol + ) + root_node = p.parse(tokens=tokens) + + if cache or diff_cache: + try_to_save_module(self._hashed, file_io, root_node, lines, + # Never pickle in pypy, it's slow as hell. + pickling=cache and not is_pypy, + cache_path=cache_path) + return root_node # type: ignore + + def _get_token_namespace(self): + ns = self._token_namespace + if ns is None: + raise ValueError("The token namespace should be set.") + return ns + + def iter_errors(self, node): + """ + Given a :py:class:`parso.tree.NodeOrLeaf` returns a generator of + :py:class:`parso.normalizer.Issue` objects. For Python this is + a list of syntax/indentation errors. + """ + if self._error_normalizer_config is None: + raise ValueError("No error normalizer specified for this grammar.") + + return self._get_normalizer_issues(node, self._error_normalizer_config) + + def refactor(self, base_node, node_to_str_map): + return RefactoringNormalizer(node_to_str_map).walk(base_node) + + def _get_normalizer(self, normalizer_config): + if normalizer_config is None: + normalizer_config = self._default_normalizer_config + if normalizer_config is None: + raise ValueError("You need to specify a normalizer, because " + "there's no default normalizer for this tree.") + return normalizer_config.create_normalizer(self) + + def _normalize(self, node, normalizer_config=None): + """ + TODO this is not public, yet. + The returned code will be normalized, e.g. PEP8 for Python. 
+ """ + normalizer = self._get_normalizer(normalizer_config) + return normalizer.walk(node) + + def _get_normalizer_issues(self, node, normalizer_config=None): + normalizer = self._get_normalizer(normalizer_config) + normalizer.walk(node) + return normalizer.issues + + def __repr__(self): + nonterminals = self._pgen_grammar.nonterminal_to_dfas.keys() + txt = ' '.join(list(nonterminals)[:3]) + ' ...' + return '<%s:%s>' % (self.__class__.__name__, txt) + + +class PythonGrammar(Grammar): + _error_normalizer_config = ErrorFinderConfig() + _token_namespace = PythonTokenTypes + _start_nonterminal = 'file_input' + + def __init__(self, version_info: PythonVersionInfo, bnf_text: str): + super().__init__( + bnf_text, + tokenizer=self._tokenize_lines, + parser=PythonParser, + diff_parser=DiffParser + ) + self.version_info = version_info + + def _tokenize_lines(self, lines, **kwargs): + return tokenize_lines(lines, version_info=self.version_info, **kwargs) + + def _tokenize(self, code): + # Used by Jedi. + return tokenize(code, version_info=self.version_info) + + +def load_grammar(*, version: str = None, path: str = None): + """ + Loads a :py:class:`parso.Grammar`. The default version is the current Python + version. + + :param str version: A python version string, e.g. ``version='3.8'``. + :param str path: A path to a grammar file + """ + version_info = parse_version_string(version) + + file = path or os.path.join( + 'python', + 'grammar%s%s.txt' % (version_info.major, version_info.minor) + ) + + global _loaded_grammars + path = os.path.join(os.path.dirname(__file__), file) + try: + return _loaded_grammars[path] + except KeyError: + try: + with open(path) as f: + bnf_text = f.read() + + grammar = PythonGrammar(version_info, bnf_text) + return _loaded_grammars.setdefault(path, grammar) + except FileNotFoundError: + message = "Python version %s.%s is currently not supported." 
% (
+        version_info.major, version_info.minor
+    )
+    raise NotImplementedError(message)
diff --git a/myenv/lib/python3.10/site-packages/parso/normalizer.py b/myenv/lib/python3.10/site-packages/parso/normalizer.py
new file mode 100644
index 000000000..a95f029eb
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/normalizer.py
@@ -0,0 +1,198 @@
+from contextlib import contextmanager
+from typing import Dict, List
+
+
+class _NormalizerMeta(type):
+    def __new__(cls, name, bases, dct):
+        new_cls = type.__new__(cls, name, bases, dct)
+        new_cls.rule_value_classes = {}
+        new_cls.rule_type_classes = {}
+        return new_cls
+
+
+class Normalizer(metaclass=_NormalizerMeta):
+    _rule_type_instances: Dict[str, List[type]] = {}
+    _rule_value_instances: Dict[str, List[type]] = {}
+
+    def __init__(self, grammar, config):
+        self.grammar = grammar
+        self._config = config
+        self.issues = []
+
+        self._rule_type_instances = self._instantiate_rules('rule_type_classes')
+        self._rule_value_instances = self._instantiate_rules('rule_value_classes')
+
+    def _instantiate_rules(self, attr):
+        dct = {}
+        for base in type(self).mro():
+            rules_map = getattr(base, attr, {})
+            for type_, rule_classes in rules_map.items():
+                new = [rule_cls(self) for rule_cls in rule_classes]
+                dct.setdefault(type_, []).extend(new)
+        return dct
+
+    def walk(self, node):
+        self.initialize(node)
+        value = self.visit(node)
+        self.finalize()
+        return value
+
+    def visit(self, node):
+        try:
+            children = node.children
+        except AttributeError:
+            return self.visit_leaf(node)
+        else:
+            with self.visit_node(node):
+                return ''.join(self.visit(child) for child in children)
+
+    @contextmanager
+    def visit_node(self, node):
+        self._check_type_rules(node)
+        yield
+
+    def _check_type_rules(self, node):
+        for rule in self._rule_type_instances.get(node.type, []):
+            rule.feed_node(node)
+
+    def visit_leaf(self, leaf):
+        self._check_type_rules(leaf)
+
+        for rule in self._rule_value_instances.get(leaf.value, []):
+            rule.feed_node(leaf)
+
+        return leaf.prefix + leaf.value
+
+    def initialize(self, node):
+        pass
+
+    def finalize(self):
+        pass
+
+    def add_issue(self, node, code, message):
+        issue = Issue(node, code, message)
+        if issue not in self.issues:
+            self.issues.append(issue)
+        return True
+
+    @classmethod
+    def register_rule(cls, *, value=None, values=(), type=None, types=()):
+        """
+        Use it as a class decorator::
+
+            normalizer = Normalizer('grammar', 'config')
+            @normalizer.register_rule(value='foo')
+            class MyRule(Rule):
+                error_code = 42
+        """
+        values = list(values)
+        types = list(types)
+        if value is not None:
+            values.append(value)
+        if type is not None:
+            types.append(type)
+
+        if not values and not types:
+            raise ValueError("You must register at least something.")
+
+        def decorator(rule_cls):
+            for v in values:
+                cls.rule_value_classes.setdefault(v, []).append(rule_cls)
+            for t in types:
+                cls.rule_type_classes.setdefault(t, []).append(rule_cls)
+            return rule_cls
+
+        return decorator
+
+
+class NormalizerConfig:
+    normalizer_class = Normalizer
+
+    def create_normalizer(self, grammar):
+        if self.normalizer_class is None:
+            return None
+
+        return self.normalizer_class(grammar, self)
+
+
+class Issue:
+    def __init__(self, node, code, message):
+        self.code = code
+        """
+        An integer code that stands for the type of error.
+        """
+        self.message = message
+        """
+        A message (string) for the issue.
+        """
+        self.start_pos = node.start_pos
+        """
+        The start position of the error as a tuple (line, column).
As
+        always in |parso| the first line is 1 and the first column 0.
+        """
+        self.end_pos = node.end_pos
+
+    def __eq__(self, other):
+        return self.start_pos == other.start_pos and self.code == other.code
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        return hash((self.code, self.start_pos))
+
+    def __repr__(self):
+        return '<%s: %s>' % (self.__class__.__name__, self.code)
+
+
+class Rule:
+    code: int
+    message: str
+
+    def __init__(self, normalizer):
+        self._normalizer = normalizer
+
+    def is_issue(self, node):
+        raise NotImplementedError()
+
+    def get_node(self, node):
+        return node
+
+    def _get_message(self, message, node):
+        if message is None:
+            message = self.message
+            if message is None:
+                raise ValueError("The message on the class is not set.")
+        return message
+
+    def add_issue(self, node, code=None, message=None):
+        if code is None:
+            code = self.code
+            if code is None:
+                raise ValueError("The error code on the class is not set.")
+
+        message = self._get_message(message, node)
+
+        self._normalizer.add_issue(node, code, message)
+
+    def feed_node(self, node):
+        if self.is_issue(node):
+            issue_node = self.get_node(node)
+            self.add_issue(issue_node)
+
+
+class RefactoringNormalizer(Normalizer):
+    def __init__(self, node_to_str_map):
+        self._node_to_str_map = node_to_str_map
+
+    def visit(self, node):
+        try:
+            return self._node_to_str_map[node]
+        except KeyError:
+            return super().visit(node)
+
+    def visit_leaf(self, leaf):
+        try:
+            return self._node_to_str_map[leaf]
+        except KeyError:
+            return super().visit_leaf(leaf)
diff --git a/myenv/lib/python3.10/site-packages/parso/parser.py b/myenv/lib/python3.10/site-packages/parso/parser.py
new file mode 100644
index 000000000..37466435d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/parser.py
@@ -0,0 +1,210 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+# Modifications:
+# Copyright David Halter and Contributors
+# Modifications are dual-licensed: MIT and PSF.
+# 99% of the code is different from pgen2, now.
+
+"""
+The ``Parser`` tries to convert the available Python code into an easy-to-read
+format, something like an abstract syntax tree. The classes that represent this
+tree live in the :mod:`parso.tree` module.
+
+The Python module ``tokenize`` is a very important part of the ``Parser``,
+because it splits the code into different words (tokens). Sometimes it looks a
+bit messy. Sorry for that! You might ask now: "Why didn't you use the ``ast``
+module for this?" Well, ``ast`` does a very good job understanding proper Python
+code, but fails to work as soon as there's a single line of broken code.
+
+There's one important optimization that needs to be known: Statements are not
+being parsed completely. ``Statement`` is just a representation of the tokens
+within the statement. This lowers memory usage and cpu time and reduces the
+complexity of the ``Parser`` (there's another parser sitting inside
+``Statement``, which produces ``Array`` and ``Call``).
+"""
+from typing import Dict, Type
+
+from parso import tree
+from parso.pgen2.generator import ReservedString
+
+
+class ParserSyntaxError(Exception):
+    """
+    Contains error information about the parser tree.
+
+    May be raised as an exception.
+    """
+    def __init__(self, message, error_leaf):
+        self.message = message
+        self.error_leaf = error_leaf
+
+
+class InternalParseError(Exception):
+    """
+    Exception to signal the parser is stuck and error recovery didn't help.
+    Basically this shouldn't happen. It's a sign that something is really
+    wrong.
+    """
+
+    def __init__(self, msg, type_, value, start_pos):
+        Exception.__init__(self, "%s: type=%r, value=%r, start_pos=%r" %
+                           (msg, type_.name, value, start_pos))
+        self.msg = msg
+        self.type = type_
+        self.value = value
+        self.start_pos = start_pos
+
+
+class Stack(list):
+    def _allowed_transition_names_and_token_types(self):
+        def iterate():
+            # An API just for Jedi.
+            for stack_node in reversed(self):
+                for transition in stack_node.dfa.transitions:
+                    if isinstance(transition, ReservedString):
+                        yield transition.value
+                    else:
+                        yield transition  # A token type
+
+                if not stack_node.dfa.is_final:
+                    break
+
+        return list(iterate())
+
+
+class StackNode:
+    def __init__(self, dfa):
+        self.dfa = dfa
+        self.nodes = []
+
+    @property
+    def nonterminal(self):
+        return self.dfa.from_rule
+
+    def __repr__(self):
+        return '%s(%s, %s)' % (self.__class__.__name__, self.dfa, self.nodes)
+
+
+def _token_to_transition(grammar, type_, value):
+    # Map from token to label
+    if type_.value.contains_syntax:
+        # Check for reserved words (keywords)
+        try:
+            return grammar.reserved_syntax_strings[value]
+        except KeyError:
+            pass
+
+    return type_
+
+
+class BaseParser:
+    """Parser engine.
+
+    A Parser instance contains state pertaining to the current token
+    sequence, and should not be used concurrently by different threads
+    to parse separate token sequences.
+
+    See python/tokenize.py for how to get input tokens from a string.
+
+    When a syntax error occurs, error_recovery() is called.
+    """
+
+    node_map: Dict[str, Type[tree.BaseNode]] = {}
+    default_node = tree.Node
+
+    leaf_map: Dict[str, Type[tree.Leaf]] = {}
+    default_leaf = tree.Leaf
+
+    def __init__(self, pgen_grammar, start_nonterminal='file_input', error_recovery=False):
+        self._pgen_grammar = pgen_grammar
+        self._start_nonterminal = start_nonterminal
+        self._error_recovery = error_recovery
+
+    def parse(self, tokens):
+        first_dfa = self._pgen_grammar.nonterminal_to_dfas[self._start_nonterminal][0]
+        self.stack = Stack([StackNode(first_dfa)])
+
+        for token in tokens:
+            self._add_token(token)
+
+        while True:
+            tos = self.stack[-1]
+            if not tos.dfa.is_final:
+                # We never broke out -- EOF is too soon -- Unfinished statement.
+                # However, the error recovery might have added the token again, if
+                # the stack is empty, we're fine.
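+                # (If is_final is False here, the top-of-stack DFA still
+                # expected more tokens when the input ran out.)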
+ raise InternalParseError( + "incomplete input", token.type, token.string, token.start_pos + ) + + if len(self.stack) > 1: + self._pop() + else: + return self.convert_node(tos.nonterminal, tos.nodes) + + def error_recovery(self, token): + if self._error_recovery: + raise NotImplementedError("Error Recovery is not implemented") + else: + type_, value, start_pos, prefix = token + error_leaf = tree.ErrorLeaf(type_, value, start_pos, prefix) + raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf) + + def convert_node(self, nonterminal, children): + try: + node = self.node_map[nonterminal](children) + except KeyError: + node = self.default_node(nonterminal, children) + return node + + def convert_leaf(self, type_, value, prefix, start_pos): + try: + return self.leaf_map[type_](value, start_pos, prefix) + except KeyError: + return self.default_leaf(value, start_pos, prefix) + + def _add_token(self, token): + """ + This is the only core function for parsing. Here happens basically + everything. Everything is well prepared by the parser generator and we + only apply the necessary steps here. + """ + grammar = self._pgen_grammar + stack = self.stack + type_, value, start_pos, prefix = token + transition = _token_to_transition(grammar, type_, value) + + while True: + try: + plan = stack[-1].dfa.transitions[transition] + break + except KeyError: + if stack[-1].dfa.is_final: + self._pop() + else: + self.error_recovery(token) + return + except IndexError: + raise InternalParseError("too much input", type_, value, start_pos) + + stack[-1].dfa = plan.next_dfa + + for push in plan.dfa_pushes: + stack.append(StackNode(push)) + + leaf = self.convert_leaf(type_, value, prefix, start_pos) + stack[-1].nodes.append(leaf) + + def _pop(self): + tos = self.stack.pop() + # If there's exactly one child, return that child instead of + # creating a new node. We still create expr_stmt and + # file_input though, because a lot of Jedi depends on its + # logic. + if len(tos.nodes) == 1: + new_node = tos.nodes[0] + else: + new_node = self.convert_node(tos.dfa.from_rule, tos.nodes) + + self.stack[-1].nodes.append(new_node) diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/__init__.py b/myenv/lib/python3.10/site-packages/parso/pgen2/__init__.py new file mode 100644 index 000000000..d4d9dcdc4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/pgen2/__init__.py @@ -0,0 +1,10 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Modifications: +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. +# Copyright 2014 David Halter and Contributors +# Modifications are dual-licensed: MIT and PSF. 
+
+from parso.pgen2.generator import generate_grammar
diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..7675ccae1
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/generator.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/generator.cpython-310.pyc
new file mode 100644
index 000000000..cecfa1be8
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/generator.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/grammar_parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/grammar_parser.cpython-310.pyc
new file mode 100644
index 000000000..bc15c3dec
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/pgen2/__pycache__/grammar_parser.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/generator.py b/myenv/lib/python3.10/site-packages/parso/pgen2/generator.py
new file mode 100644
index 000000000..db6e1cb32
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/pgen2/generator.py
@@ -0,0 +1,382 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+# Modifications:
+# Copyright David Halter and Contributors
+# Modifications are dual-licensed: MIT and PSF.
+
+"""
+This module defines the data structures used to represent a grammar.
+
+Specifying grammars in pgen is possible with this grammar::
+
+    grammar: (NEWLINE | rule)* ENDMARKER
+    rule: NAME ':' rhs NEWLINE
+    rhs: items ('|' items)*
+    items: item+
+    item: '[' rhs ']' | atom ['+' | '*']
+    atom: '(' rhs ')' | NAME | STRING
+
+This grammar is self-referencing.
+
+This parser generator (pgen2) was created by Guido van Rossum and used for
+lib2to3. Most of the code has been refactored to make it more Pythonic. Since
+this was a "copy" of the CPython parser generator "pgen", there was some work
+needed to make it more readable. It should also be slightly faster than the
+original pgen2, because we made some optimizations.
+"""
+
+from ast import literal_eval
+from typing import TypeVar, Generic, Mapping, Sequence, Set, Union
+
+from parso.pgen2.grammar_parser import GrammarParser, NFAState
+
+_TokenTypeT = TypeVar("_TokenTypeT")
+
+
+class Grammar(Generic[_TokenTypeT]):
+    """
+    Once initialized, this class supplies the grammar tables for the
+    parsing engine implemented by parse.py. The parsing engine
+    accesses the instance variables directly.
+
+    The only important parts in this parser are the dfas and the transitions
+    between them.
+    """
+
+    def __init__(self,
+                 start_nonterminal: str,
+                 rule_to_dfas: Mapping[str, Sequence['DFAState[_TokenTypeT]']],
+                 reserved_syntax_strings: Mapping[str, 'ReservedString']):
+        self.nonterminal_to_dfas = rule_to_dfas
+        self.reserved_syntax_strings = reserved_syntax_strings
+        self.start_nonterminal = start_nonterminal
+
+
+class DFAPlan:
+    """
+    Plans are used for the parser to create stack nodes and do the proper
+    DFA state transitions.
+    """
+    def __init__(self, next_dfa: 'DFAState', dfa_pushes: Sequence['DFAState'] = []):
+        self.next_dfa = next_dfa
+        self.dfa_pushes = dfa_pushes
+
+    def __repr__(self):
+        return '%s(%s, %s)' % (self.__class__.__name__, self.next_dfa, self.dfa_pushes)
+
+
+class DFAState(Generic[_TokenTypeT]):
+    """
+    The DFAState object is the core class for pretty much anything. DFAStates
+    are the vertices of an ordered graph while arcs and transitions are the
+    edges.
+
+    Arcs are the initial edges, where most DFAStates are not connected and
+    transitions are then calculated to connect the DFA state machines that have
+    different nonterminals.
+    """
+    def __init__(self, from_rule: str, nfa_set: Set[NFAState], final: NFAState):
+        assert isinstance(nfa_set, set)
+        assert isinstance(next(iter(nfa_set)), NFAState)
+        assert isinstance(final, NFAState)
+        self.from_rule = from_rule
+        self.nfa_set = nfa_set
+        # map from terminals/nonterminals to DFAState
+        self.arcs: Mapping[str, DFAState] = {}
+        # In an intermediary step we set these nonterminal arcs (which have the
+        # same structure as arcs). These don't contain terminals anymore.
+        self.nonterminal_arcs: Mapping[str, DFAState] = {}
+
+        # Transitions are basically the only thing that the parser is using,
+        # together with is_final. Everything else is purely here to create a
+        # parser.
+        self.transitions: Mapping[Union[_TokenTypeT, ReservedString], DFAPlan] = {}
+        self.is_final = final in nfa_set
+
+    def add_arc(self, next_, label):
+        assert isinstance(label, str)
+        assert label not in self.arcs
+        assert isinstance(next_, DFAState)
+        self.arcs[label] = next_
+
+    def unifystate(self, old, new):
+        for label, next_ in self.arcs.items():
+            if next_ is old:
+                self.arcs[label] = new
+
+    def __eq__(self, other):
+        # Equality test -- ignore the nfa_set instance variable
+        assert isinstance(other, DFAState)
+        if self.is_final != other.is_final:
+            return False
+        # Can't just return self.arcs == other.arcs, because that
+        # would invoke this method recursively, with cycles...
+        if len(self.arcs) != len(other.arcs):
+            return False
+        for label, next_ in self.arcs.items():
+            if next_ is not other.arcs.get(label):
+                return False
+        return True
+
+    def __repr__(self):
+        return '<%s: %s is_final=%s>' % (
+            self.__class__.__name__, self.from_rule, self.is_final
+        )
+
+
+class ReservedString:
+    """
+    Most grammars will have certain keywords and operators that are mentioned
+    in the grammar as strings (e.g. "if") and not token types (e.g. NUMBER).
+    This class basically is the former.
+    """
+
+    def __init__(self, value: str):
+        self.value = value
+
+    def __repr__(self):
+        return '%s(%s)' % (self.__class__.__name__, self.value)
+
+
+def _simplify_dfas(dfas):
+    """
+    This is not theoretically optimal, but works well enough.
+    Algorithm: repeatedly look for two states that have the same
+    set of arcs (same labels pointing to the same nodes) and
+    unify them, until things stop changing.
+
+    dfas is a list of DFAState instances
+    """
+    changes = True
+    while changes:
+        changes = False
+        for i, state_i in enumerate(dfas):
+            for j in range(i + 1, len(dfas)):
+                state_j = dfas[j]
+                if state_i == state_j:
+                    del dfas[j]
+                    for state in dfas:
+                        state.unifystate(state_j, state_i)
+                    changes = True
+                    break
+
+
+def _make_dfas(start, finish):
+    """
+    Uses the powerset construction algorithm to create DFA states from sets of
+    NFA states.
+
+    Also does state reduction if some states are not needed.
+    """
+    # To turn an NFA into a DFA, we define the states of the DFA
+    # to correspond to *sets* of states of the NFA. Then do some
+    # state reduction.
+    assert isinstance(start, NFAState)
+    assert isinstance(finish, NFAState)
+
+    def addclosure(nfa_state, base_nfa_set):
+        assert isinstance(nfa_state, NFAState)
+        if nfa_state in base_nfa_set:
+            return
+        base_nfa_set.add(nfa_state)
+        for nfa_arc in nfa_state.arcs:
+            if nfa_arc.nonterminal_or_string is None:
+                addclosure(nfa_arc.next, base_nfa_set)
+
+    base_nfa_set = set()
+    addclosure(start, base_nfa_set)
+    states = [DFAState(start.from_rule, base_nfa_set, finish)]
+    for state in states:  # NB states grows while we're iterating
+        arcs = {}
+        # Find state transitions and store them in arcs.
+        for nfa_state in state.nfa_set:
+            for nfa_arc in nfa_state.arcs:
+                if nfa_arc.nonterminal_or_string is not None:
+                    nfa_set = arcs.setdefault(nfa_arc.nonterminal_or_string, set())
+                    addclosure(nfa_arc.next, nfa_set)
+
+        # Now create the dfa's with no None's in arcs anymore. All Nones have
+        # been eliminated and state transitions (arcs) are properly defined, we
+        # just need to create the dfa's.
+        for nonterminal_or_string, nfa_set in arcs.items():
+            for nested_state in states:
+                if nested_state.nfa_set == nfa_set:
+                    # The DFA state already exists for this rule.
+                    break
+            else:
+                nested_state = DFAState(start.from_rule, nfa_set, finish)
+                states.append(nested_state)
+
+            state.add_arc(nested_state, nonterminal_or_string)
+    return states  # List of DFAState instances; first one is start
+
+
+def _dump_nfa(start, finish):
+    print("Dump of NFA for", start.from_rule)
+    todo = [start]
+    for i, state in enumerate(todo):
+        print("  State", i, state is finish and "(final)" or "")
+        for arc in state.arcs:
+            label, next_ = arc.nonterminal_or_string, arc.next
+            if next_ in todo:
+                j = todo.index(next_)
+            else:
+                j = len(todo)
+                todo.append(next_)
+            if label is None:
+                print("    -> %d" % j)
+            else:
+                print("    %s -> %d" % (label, j))
+
+
+def _dump_dfas(dfas):
+    print("Dump of DFA for", dfas[0].from_rule)
+    for i, state in enumerate(dfas):
+        print("  State", i, state.is_final and "(final)" or "")
+        for nonterminal, next_ in state.arcs.items():
+            print("    %s -> %d" % (nonterminal, dfas.index(next_)))
+
+
+def generate_grammar(bnf_grammar: str, token_namespace) -> Grammar:
+    """
+    ``bnf_grammar`` is a grammar in extended BNF (using * for repetition, + for
+    at-least-once repetition, [] for optional parts, | for alternatives and ()
+    for grouping).
+
+    It's not EBNF according to ISO/IEC 14977. It's a dialect Python uses in its
+    own parser.
+ """ + rule_to_dfas = {} + start_nonterminal = None + for nfa_a, nfa_z in GrammarParser(bnf_grammar).parse(): + # _dump_nfa(nfa_a, nfa_z) + dfas = _make_dfas(nfa_a, nfa_z) + # _dump_dfas(dfas) + # oldlen = len(dfas) + _simplify_dfas(dfas) + # newlen = len(dfas) + rule_to_dfas[nfa_a.from_rule] = dfas + # print(nfa_a.from_rule, oldlen, newlen) + + if start_nonterminal is None: + start_nonterminal = nfa_a.from_rule + + reserved_strings: Mapping[str, ReservedString] = {} + for nonterminal, dfas in rule_to_dfas.items(): + for dfa_state in dfas: + for terminal_or_nonterminal, next_dfa in dfa_state.arcs.items(): + if terminal_or_nonterminal in rule_to_dfas: + dfa_state.nonterminal_arcs[terminal_or_nonterminal] = next_dfa + else: + transition = _make_transition( + token_namespace, + reserved_strings, + terminal_or_nonterminal + ) + dfa_state.transitions[transition] = DFAPlan(next_dfa) + + _calculate_tree_traversal(rule_to_dfas) + return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) # type: ignore + + +def _make_transition(token_namespace, reserved_syntax_strings, label): + """ + Creates a reserved string ("if", "for", "*", ...) or returns the token type + (NUMBER, STRING, ...) for a given grammar terminal. + """ + if label[0].isalpha(): + # A named token (e.g. NAME, NUMBER, STRING) + return getattr(token_namespace, label) + else: + # Either a keyword or an operator + assert label[0] in ('"', "'"), label + assert not label.startswith('"""') and not label.startswith("'''") + value = literal_eval(label) + try: + return reserved_syntax_strings[value] + except KeyError: + r = reserved_syntax_strings[value] = ReservedString(value) + return r + + +def _calculate_tree_traversal(nonterminal_to_dfas): + """ + By this point we know how dfas can move around within a stack node, but we + don't know how we can add a new stack node (nonterminal transitions). + """ + # Map from grammar rule (nonterminal) name to a set of tokens. + first_plans = {} + + nonterminals = list(nonterminal_to_dfas.keys()) + nonterminals.sort() + for nonterminal in nonterminals: + if nonterminal not in first_plans: + _calculate_first_plans(nonterminal_to_dfas, first_plans, nonterminal) + + # Now that we have calculated the first terminals, we are sure that + # there is no left recursion. + + for dfas in nonterminal_to_dfas.values(): + for dfa_state in dfas: + transitions = dfa_state.transitions + for nonterminal, next_dfa in dfa_state.nonterminal_arcs.items(): + for transition, pushes in first_plans[nonterminal].items(): + if transition in transitions: + prev_plan = transitions[transition] + # Make sure these are sorted so that error messages are + # at least deterministic + choices = sorted([ + ( + prev_plan.dfa_pushes[0].from_rule + if prev_plan.dfa_pushes + else prev_plan.next_dfa.from_rule + ), + ( + pushes[0].from_rule + if pushes else next_dfa.from_rule + ), + ]) + raise ValueError( + "Rule %s is ambiguous; given a %s token, we " + "can't determine if we should evaluate %s or %s." + % ( + ( + dfa_state.from_rule, + transition, + ) + tuple(choices) + ) + ) + transitions[transition] = DFAPlan(next_dfa, pushes) + + +def _calculate_first_plans(nonterminal_to_dfas, first_plans, nonterminal): + """ + Calculates the first plan in the first_plans dictionary for every given + nonterminal. This is going to be used to know when to create stack nodes. + """ + dfas = nonterminal_to_dfas[nonterminal] + new_first_plans = {} + first_plans[nonterminal] = None # dummy to detect left recursion + # We only need to check the first dfa. 
All the following ones are not + # interesting to find first terminals. + state = dfas[0] + for transition, next_ in state.transitions.items(): + # It's a string. We have finally found a possible first token. + new_first_plans[transition] = [next_.next_dfa] + + for nonterminal2, next_ in state.nonterminal_arcs.items(): + # It's a nonterminal and we have either a left recursion issue + # in the grammar or we have to recurse. + try: + first_plans2 = first_plans[nonterminal2] + except KeyError: + first_plans2 = _calculate_first_plans(nonterminal_to_dfas, first_plans, nonterminal2) + else: + if first_plans2 is None: + raise ValueError("left recursion for rule %r" % nonterminal) + + for t, pushes in first_plans2.items(): + new_first_plans[t] = [next_] + pushes + + first_plans[nonterminal] = new_first_plans + return new_first_plans diff --git a/myenv/lib/python3.10/site-packages/parso/pgen2/grammar_parser.py b/myenv/lib/python3.10/site-packages/parso/pgen2/grammar_parser.py new file mode 100644 index 000000000..582efb43a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/pgen2/grammar_parser.py @@ -0,0 +1,160 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Modifications: +# Copyright David Halter and Contributors +# Modifications are dual-licensed: MIT and PSF. +from typing import Optional, Iterator, Tuple, List + +from parso.python.tokenize import tokenize +from parso.utils import parse_version_string +from parso.python.token import PythonTokenTypes + + +class NFAArc: + def __init__(self, next_: 'NFAState', nonterminal_or_string: Optional[str]): + self.next: NFAState = next_ + self.nonterminal_or_string: Optional[str] = nonterminal_or_string + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, self.nonterminal_or_string) + + +class NFAState: + def __init__(self, from_rule: str): + self.from_rule: str = from_rule + self.arcs: List[NFAArc] = [] + + def add_arc(self, next_, nonterminal_or_string=None): + assert nonterminal_or_string is None or isinstance(nonterminal_or_string, str) + assert isinstance(next_, NFAState) + self.arcs.append(NFAArc(next_, nonterminal_or_string)) + + def __repr__(self): + return '<%s: from %s>' % (self.__class__.__name__, self.from_rule) + + +class GrammarParser: + """ + The parser for Python grammar files. + """ + def __init__(self, bnf_grammar: str): + self._bnf_grammar = bnf_grammar + self.generator = tokenize( + bnf_grammar, + version_info=parse_version_string('3.9') + ) + self._gettoken() # Initialize lookahead + + def parse(self) -> Iterator[Tuple[NFAState, NFAState]]: + # grammar: (NEWLINE | rule)* ENDMARKER + while self.type != PythonTokenTypes.ENDMARKER: + while self.type == PythonTokenTypes.NEWLINE: + self._gettoken() + + # rule: NAME ':' rhs NEWLINE + self._current_rule_name = self._expect(PythonTokenTypes.NAME) + self._expect(PythonTokenTypes.OP, ':') + + a, z = self._parse_rhs() + self._expect(PythonTokenTypes.NEWLINE) + + yield a, z + + def _parse_rhs(self): + # rhs: items ('|' items)* + a, z = self._parse_items() + if self.value != "|": + return a, z + else: + aa = NFAState(self._current_rule_name) + zz = NFAState(self._current_rule_name) + while True: + # Add the possibility to go into the state of a and come back + # to finish. 
+ aa.add_arc(a) + z.add_arc(zz) + if self.value != "|": + break + + self._gettoken() + a, z = self._parse_items() + return aa, zz + + def _parse_items(self): + # items: item+ + a, b = self._parse_item() + while self.type in (PythonTokenTypes.NAME, PythonTokenTypes.STRING) \ + or self.value in ('(', '['): + c, d = self._parse_item() + # Need to end on the next item. + b.add_arc(c) + b = d + return a, b + + def _parse_item(self): + # item: '[' rhs ']' | atom ['+' | '*'] + if self.value == "[": + self._gettoken() + a, z = self._parse_rhs() + self._expect(PythonTokenTypes.OP, ']') + # Make it also possible that there is no token and change the + # state. + a.add_arc(z) + return a, z + else: + a, z = self._parse_atom() + value = self.value + if value not in ("+", "*"): + return a, z + self._gettoken() + # Make it clear that we can go back to the old state and repeat. + z.add_arc(a) + if value == "+": + return a, z + else: + # The end state is the same as the beginning, nothing must + # change. + return a, a + + def _parse_atom(self): + # atom: '(' rhs ')' | NAME | STRING + if self.value == "(": + self._gettoken() + a, z = self._parse_rhs() + self._expect(PythonTokenTypes.OP, ')') + return a, z + elif self.type in (PythonTokenTypes.NAME, PythonTokenTypes.STRING): + a = NFAState(self._current_rule_name) + z = NFAState(self._current_rule_name) + # Make it clear that the state transition requires that value. + a.add_arc(z, self.value) + self._gettoken() + return a, z + else: + self._raise_error("expected (...) or NAME or STRING, got %s/%s", + self.type, self.value) + + def _expect(self, type_, value=None): + if self.type != type_: + self._raise_error("expected %s, got %s [%s]", + type_, self.type, self.value) + if value is not None and self.value != value: + self._raise_error("expected %s, got %s", value, self.value) + value = self.value + self._gettoken() + return value + + def _gettoken(self): + tup = next(self.generator) + self.type, self.value, self.begin, prefix = tup + + def _raise_error(self, msg, *args): + if args: + try: + msg = msg % args + except: + msg = " ".join([msg] + list(map(str, args))) + line = self._bnf_grammar.splitlines()[self.begin[0] - 1] + raise SyntaxError(msg, ('', self.begin[0], + self.begin[1], line)) diff --git a/myenv/lib/python3.10/site-packages/parso/py.typed b/myenv/lib/python3.10/site-packages/parso/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/parso/python/__init__.py b/myenv/lib/python3.10/site-packages/parso/python/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..d127b9407 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/diff.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/diff.cpython-310.pyc new file mode 100644 index 000000000..0f9f92308 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/diff.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/errors.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/errors.cpython-310.pyc new file mode 100644 index 000000000..d3daf08d5 Binary files /dev/null 
and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/errors.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/parser.cpython-310.pyc
new file mode 100644
index 000000000..5f8c502dc
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/parser.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/pep8.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/pep8.cpython-310.pyc
new file mode 100644
index 000000000..e881b3d98
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/pep8.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/prefix.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/prefix.cpython-310.pyc
new file mode 100644
index 000000000..b7bf09b21
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/prefix.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/token.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/token.cpython-310.pyc
new file mode 100644
index 000000000..0d985759d
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/token.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tokenize.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tokenize.cpython-310.pyc
new file mode 100644
index 000000000..fa8323a1f
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tokenize.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tree.cpython-310.pyc b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tree.cpython-310.pyc
new file mode 100644
index 000000000..a9c40c0c3
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/parso/python/__pycache__/tree.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/parso/python/diff.py b/myenv/lib/python3.10/site-packages/parso/python/diff.py
new file mode 100644
index 000000000..ba999fa4b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/python/diff.py
@@ -0,0 +1,886 @@
+"""
+The diff parser is trying to be a faster version of the normal parser by trying
+to reuse the nodes of a previous pass over the same file. This is also called
+incremental parsing in parser literature. The difference is mostly that with
+incremental parsing you get a range that needs to be reparsed. Here we
+calculate that range ourselves by using difflib. After that it's essentially
+incremental parsing.
+
+The biggest issue of this approach is that we reuse nodes in a mutable way. The
+initial design and idea is quite problematic for this parser, but it is also
+pretty fast. Measurements showed that just copying nodes in Python is simply
+quite a bit slower (especially for big files >3 kLOC). Therefore we did not
+want to get rid of the mutable nodes, since this is usually not an issue.
+
+This is by far the hardest software I ever wrote, exactly because the initial
+design is crappy. When you have to account for a lot of mutable state, it
+creates a ton of issues that you would otherwise not have. This file took
+probably 3-6 months to write, which is insane for a parser.
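
The docstring above names the core trick: the reparse range is computed with
the stdlib ``difflib`` module rather than being supplied by the caller. A
minimal standalone sketch of that range calculation (illustrative only, not
taken from the diffed file)::

    import difflib

    old = ['def f():\n', '    return 1\n', '\n', 'print(f())\n']
    new = ['def f():\n', '    return 2\n', '\n', 'print(f())\n']

    sm = difflib.SequenceMatcher(None, old, new)
    for operation, i1, i2, j1, j2 in sm.get_opcodes():
        # 'equal' ranges are candidates for copying nodes from the old
        # tree; 'replace'/'insert'/'delete' ranges must be reparsed.
        print(operation, 'old[%s:%s]' % (i1, i2), 'new[%s:%s]' % (j1, j2))
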
+
+There is a fuzzer that helps test this whole thing. Please use it if you
+make changes here. If you run the fuzzer like::
+
+ test/fuzz_diff_parser.py random -n 100000
+
+you can be pretty sure that everything is still fine. I sometimes run the
+fuzzer up to 24h to make sure everything is still ok.
+"""
+import re
+import difflib
+from collections import namedtuple
+import logging
+
+from parso.utils import split_lines
+from parso.python.parser import Parser
+from parso.python.tree import EndMarker
+from parso.python.tokenize import PythonToken, BOM_UTF8_STRING
+from parso.python.token import PythonTokenTypes
+
+LOG = logging.getLogger(__name__)
+DEBUG_DIFF_PARSER = False
+
+_INDENTATION_TOKENS = 'INDENT', 'ERROR_DEDENT', 'DEDENT'
+
+NEWLINE = PythonTokenTypes.NEWLINE
+DEDENT = PythonTokenTypes.DEDENT
+NAME = PythonTokenTypes.NAME
+ERROR_DEDENT = PythonTokenTypes.ERROR_DEDENT
+ENDMARKER = PythonTokenTypes.ENDMARKER
+
+
+def _is_indentation_error_leaf(node):
+ return node.type == 'error_leaf' and node.token_type in _INDENTATION_TOKENS
+
+
+def _get_previous_leaf_if_indentation(leaf):
+ while leaf and _is_indentation_error_leaf(leaf):
+ leaf = leaf.get_previous_leaf()
+ return leaf
+
+
+def _get_next_leaf_if_indentation(leaf):
+ while leaf and _is_indentation_error_leaf(leaf):
+ leaf = leaf.get_next_leaf()
+ return leaf
+
+
+def _get_suite_indentation(tree_node):
+ return _get_indentation(tree_node.children[1])
+
+
+def _get_indentation(tree_node):
+ return tree_node.start_pos[1]
+
+
+def _assert_valid_graph(node):
+ """
+ Checks if the parent/children relationship is correct.
+
+ This is a check that only runs during debugging/testing.
+ """
+ try:
+ children = node.children
+ except AttributeError:
+ # Ignoring INDENT is necessary, because indent/dedent tokens don't
+ # contain a value/prefix and only exist because of the tokenizer.
+ if node.type == 'error_leaf' and node.token_type in _INDENTATION_TOKENS:
+ assert not node.value
+ assert not node.prefix
+ return
+
+ # Calculate the content between two start positions.
+ previous_leaf = _get_previous_leaf_if_indentation(node.get_previous_leaf()) + if previous_leaf is None: + content = node.prefix + previous_start_pos = 1, 0 + else: + assert previous_leaf.end_pos <= node.start_pos, \ + (previous_leaf, node) + + content = previous_leaf.value + node.prefix + previous_start_pos = previous_leaf.start_pos + + if '\n' in content or '\r' in content: + splitted = split_lines(content) + line = previous_start_pos[0] + len(splitted) - 1 + actual = line, len(splitted[-1]) + else: + actual = previous_start_pos[0], previous_start_pos[1] + len(content) + if content.startswith(BOM_UTF8_STRING) \ + and node.get_start_pos_of_prefix() == (1, 0): + # Remove the byte order mark + actual = actual[0], actual[1] - 1 + + assert node.start_pos == actual, (node.start_pos, actual) + else: + for child in children: + assert child.parent == node, (node, child) + _assert_valid_graph(child) + + +def _assert_nodes_are_equal(node1, node2): + try: + children1 = node1.children + except AttributeError: + assert not hasattr(node2, 'children'), (node1, node2) + assert node1.value == node2.value, (node1, node2) + assert node1.type == node2.type, (node1, node2) + assert node1.prefix == node2.prefix, (node1, node2) + assert node1.start_pos == node2.start_pos, (node1, node2) + return + else: + try: + children2 = node2.children + except AttributeError: + assert False, (node1, node2) + for n1, n2 in zip(children1, children2): + _assert_nodes_are_equal(n1, n2) + assert len(children1) == len(children2), '\n' + repr(children1) + '\n' + repr(children2) + + +def _get_debug_error_message(module, old_lines, new_lines): + current_lines = split_lines(module.get_code(), keepends=True) + current_diff = difflib.unified_diff(new_lines, current_lines) + old_new_diff = difflib.unified_diff(old_lines, new_lines) + import parso + return ( + "There's an issue with the diff parser. Please " + "report (parso v%s) - Old/New:\n%s\nActual Diff (May be empty):\n%s" + % (parso.__version__, ''.join(old_new_diff), ''.join(current_diff)) + ) + + +def _get_last_line(node_or_leaf): + last_leaf = node_or_leaf.get_last_leaf() + if _ends_with_newline(last_leaf): + return last_leaf.start_pos[0] + else: + n = last_leaf.get_next_leaf() + if n.type == 'endmarker' and '\n' in n.prefix: + # This is a very special case and has to do with error recovery in + # Parso. The problem is basically that there's no newline leaf at + # the end sometimes (it's required in the grammar, but not needed + # actually before endmarker, CPython just adds a newline to make + # source code pass the parser, to account for that Parso error + # recovery allows small_stmt instead of simple_stmt). + return last_leaf.end_pos[0] + 1 + return last_leaf.end_pos[0] + + +def _skip_dedent_error_leaves(leaf): + while leaf is not None and leaf.type == 'error_leaf' and leaf.token_type == 'DEDENT': + leaf = leaf.get_previous_leaf() + return leaf + + +def _ends_with_newline(leaf, suffix=''): + leaf = _skip_dedent_error_leaves(leaf) + + if leaf.type == 'error_leaf': + typ = leaf.token_type.lower() + else: + typ = leaf.type + + return typ == 'newline' or suffix.endswith('\n') or suffix.endswith('\r') + + +def _flows_finished(pgen_grammar, stack): + """ + if, while, for and try might not be finished, because another part might + still be parsed. 
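
In other words, while an ``if``/``while``/``for``/``try`` is still open on the
parser's stack, an ``elif``/``else``/``except``/``finally`` part may still
follow, so the surrounding suite cannot yet be treated as complete. A
simplified standalone restatement of the check (illustrative sketch, not
parso's API)::

    _FLOWS = ('if_stmt', 'while_stmt', 'for_stmt', 'try_stmt')

    def flows_finished(stack_nonterminals):
        # Any open flow statement means more clauses may still be parsed.
        return not any(n in _FLOWS for n in stack_nonterminals)

    assert flows_finished(['file_input', 'funcdef', 'suite'])
    assert not flows_finished(['file_input', 'if_stmt', 'suite'])
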
+ """ + for stack_node in stack: + if stack_node.nonterminal in ('if_stmt', 'while_stmt', 'for_stmt', 'try_stmt'): + return False + return True + + +def _func_or_class_has_suite(node): + if node.type == 'decorated': + node = node.children[-1] + if node.type in ('async_funcdef', 'async_stmt'): + node = node.children[-1] + return node.type in ('classdef', 'funcdef') and node.children[-1].type == 'suite' + + +def _suite_or_file_input_is_valid(pgen_grammar, stack): + if not _flows_finished(pgen_grammar, stack): + return False + + for stack_node in reversed(stack): + if stack_node.nonterminal == 'decorator': + # A decorator is only valid with the upcoming function. + return False + + if stack_node.nonterminal == 'suite': + # If only newline is in the suite, the suite is not valid, yet. + return len(stack_node.nodes) > 1 + # Not reaching a suite means that we're dealing with file_input levels + # where there's no need for a valid statement in it. It can also be empty. + return True + + +def _is_flow_node(node): + if node.type == 'async_stmt': + node = node.children[1] + try: + value = node.children[0].value + except AttributeError: + return False + return value in ('if', 'for', 'while', 'try', 'with') + + +class _PositionUpdatingFinished(Exception): + pass + + +def _update_positions(nodes, line_offset, last_leaf): + for node in nodes: + try: + children = node.children + except AttributeError: + # Is a leaf + node.line += line_offset + if node is last_leaf: + raise _PositionUpdatingFinished + else: + _update_positions(children, line_offset, last_leaf) + + +class DiffParser: + """ + An advanced form of parsing a file faster. Unfortunately comes with huge + side effects. It changes the given module. + """ + def __init__(self, pgen_grammar, tokenizer, module): + self._pgen_grammar = pgen_grammar + self._tokenizer = tokenizer + self._module = module + + def _reset(self): + self._copy_count = 0 + self._parser_count = 0 + + self._nodes_tree = _NodesTree(self._module) + + def update(self, old_lines, new_lines): + ''' + The algorithm works as follows: + + Equal: + - Assure that the start is a newline, otherwise parse until we get + one. + - Copy from parsed_until_line + 1 to max(i2 + 1) + - Make sure that the indentation is correct (e.g. add DEDENT) + - Add old and change positions + Insert: + - Parse from parsed_until_line + 1 to min(j2 + 1), hopefully not + much more. + + Returns the new module node. + ''' + LOG.debug('diff parser start') + # Reset the used names cache so they get regenerated. + self._module._used_names = None + + self._parser_lines_new = new_lines + + self._reset() + + line_length = len(new_lines) + sm = difflib.SequenceMatcher(None, old_lines, self._parser_lines_new) + opcodes = sm.get_opcodes() + LOG.debug('line_lengths old: %s; new: %s' % (len(old_lines), line_length)) + + for operation, i1, i2, j1, j2 in opcodes: + LOG.debug('-> code[%s] old[%s:%s] new[%s:%s]', + operation, i1 + 1, i2, j1 + 1, j2) + + if j2 == line_length and new_lines[-1] == '': + # The empty part after the last newline is not relevant. + j2 -= 1 + + if operation == 'equal': + line_offset = j1 - i1 + self._copy_from_old_parser(line_offset, i1 + 1, i2, j2) + elif operation == 'replace': + self._parse(until_line=j2) + elif operation == 'insert': + self._parse(until_line=j2) + else: + assert operation == 'delete' + + # With this action all change will finally be applied and we have a + # changed module. 
+ self._nodes_tree.close() + + if DEBUG_DIFF_PARSER: + # If there is reasonable suspicion that the diff parser is not + # behaving well, this should be enabled. + try: + code = ''.join(new_lines) + assert self._module.get_code() == code + _assert_valid_graph(self._module) + without_diff_parser_module = Parser( + self._pgen_grammar, + error_recovery=True + ).parse(self._tokenizer(new_lines)) + _assert_nodes_are_equal(self._module, without_diff_parser_module) + except AssertionError: + print(_get_debug_error_message(self._module, old_lines, new_lines)) + raise + + last_pos = self._module.end_pos[0] + if last_pos != line_length: + raise Exception( + ('(%s != %s) ' % (last_pos, line_length)) + + _get_debug_error_message(self._module, old_lines, new_lines) + ) + LOG.debug('diff parser end') + return self._module + + def _enabled_debugging(self, old_lines, lines_new): + if self._module.get_code() != ''.join(lines_new): + LOG.warning('parser issue:\n%s\n%s', ''.join(old_lines), ''.join(lines_new)) + + def _copy_from_old_parser(self, line_offset, start_line_old, until_line_old, until_line_new): + last_until_line = -1 + while until_line_new > self._nodes_tree.parsed_until_line: + parsed_until_line_old = self._nodes_tree.parsed_until_line - line_offset + line_stmt = self._get_old_line_stmt(parsed_until_line_old + 1) + if line_stmt is None: + # Parse 1 line at least. We don't need more, because we just + # want to get into a state where the old parser has statements + # again that can be copied (e.g. not lines within parentheses). + self._parse(self._nodes_tree.parsed_until_line + 1) + else: + p_children = line_stmt.parent.children + index = p_children.index(line_stmt) + + if start_line_old == 1 \ + and p_children[0].get_first_leaf().prefix.startswith(BOM_UTF8_STRING): + # If there's a BOM in the beginning, just reparse. It's too + # complicated to account for it otherwise. + copied_nodes = [] + else: + from_ = self._nodes_tree.parsed_until_line + 1 + copied_nodes = self._nodes_tree.copy_nodes( + p_children[index:], + until_line_old, + line_offset + ) + # Match all the nodes that are in the wanted range. + if copied_nodes: + self._copy_count += 1 + + to = self._nodes_tree.parsed_until_line + + LOG.debug('copy old[%s:%s] new[%s:%s]', + copied_nodes[0].start_pos[0], + copied_nodes[-1].end_pos[0] - 1, from_, to) + else: + # We have copied as much as possible (but definitely not too + # much). Therefore we just parse a bit more. + self._parse(self._nodes_tree.parsed_until_line + 1) + # Since there are potential bugs that might loop here endlessly, we + # just stop here. + assert last_until_line != self._nodes_tree.parsed_until_line, last_until_line + last_until_line = self._nodes_tree.parsed_until_line + + def _get_old_line_stmt(self, old_line): + leaf = self._module.get_leaf_for_position((old_line, 0), include_prefixes=True) + + if _ends_with_newline(leaf): + leaf = leaf.get_next_leaf() + if leaf.get_start_pos_of_prefix()[0] == old_line: + node = leaf + while node.parent.type not in ('file_input', 'suite'): + node = node.parent + + # Make sure that if only the `else:` line of an if statement is + # copied that not the whole thing is going to be copied. + if node.start_pos[0] >= old_line: + return node + # Must be on the same line. Otherwise we need to parse that bit. + return None + + def _parse(self, until_line): + """ + Parses at least until the given line, but might just parse more until a + valid state is reached. 
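
Both ``_copy_from_old_parser`` above and this method rely on the same safety
pattern: every loop iteration must advance ``parsed_until_line``, and an
``assert`` turns a stuck loop into a crash instead of a hang. A standalone
sketch of the pattern (illustrative only; ``step`` is a hypothetical stand-in)::

    class State:
        parsed_until_line = 0

        def step(self):
            # Stand-in for "copy old nodes or parse one more line".
            self.parsed_until_line += 1

    def parse_until(state, until_line):
        last_until_line = -1
        while until_line > state.parsed_until_line:
            state.step()
            # Every iteration must make progress, otherwise a tokenizer
            # bug would turn into an endless loop.
            assert state.parsed_until_line != last_until_line
            last_until_line = state.parsed_until_line

    s = State()
    parse_until(s, 3)
    assert s.parsed_until_line == 3
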
+ """ + last_until_line = 0 + while until_line > self._nodes_tree.parsed_until_line: + node = self._try_parse_part(until_line) + nodes = node.children + + self._nodes_tree.add_parsed_nodes(nodes, self._keyword_token_indents) + if self._replace_tos_indent is not None: + self._nodes_tree.indents[-1] = self._replace_tos_indent + + LOG.debug( + 'parse_part from %s to %s (to %s in part parser)', + nodes[0].get_start_pos_of_prefix()[0], + self._nodes_tree.parsed_until_line, + node.end_pos[0] - 1 + ) + # Since the tokenizer sometimes has bugs, we cannot be sure that + # this loop terminates. Therefore assert that there's always a + # change. + assert last_until_line != self._nodes_tree.parsed_until_line, last_until_line + last_until_line = self._nodes_tree.parsed_until_line + + def _try_parse_part(self, until_line): + """ + Sets up a normal parser that uses a spezialized tokenizer to only parse + until a certain position (or a bit longer if the statement hasn't + ended. + """ + self._parser_count += 1 + # TODO speed up, shouldn't copy the whole list all the time. + # memoryview? + parsed_until_line = self._nodes_tree.parsed_until_line + lines_after = self._parser_lines_new[parsed_until_line:] + tokens = self._diff_tokenize( + lines_after, + until_line, + line_offset=parsed_until_line + ) + self._active_parser = Parser( + self._pgen_grammar, + error_recovery=True + ) + return self._active_parser.parse(tokens=tokens) + + def _diff_tokenize(self, lines, until_line, line_offset=0): + was_newline = False + indents = self._nodes_tree.indents + initial_indentation_count = len(indents) + + tokens = self._tokenizer( + lines, + start_pos=(line_offset + 1, 0), + indents=indents, + is_first_token=line_offset == 0, + ) + stack = self._active_parser.stack + self._replace_tos_indent = None + self._keyword_token_indents = {} + # print('start', line_offset + 1, indents) + for token in tokens: + # print(token, indents) + typ = token.type + if typ == DEDENT: + if len(indents) < initial_indentation_count: + # We are done here, only thing that can come now is an + # endmarker or another dedented code block. + while True: + typ, string, start_pos, prefix = token = next(tokens) + if typ in (DEDENT, ERROR_DEDENT): + if typ == ERROR_DEDENT: + # We want to force an error dedent in the next + # parser/pass. To make this possible we just + # increase the location by one. + self._replace_tos_indent = start_pos[1] + 1 + pass + else: + break + + if '\n' in prefix or '\r' in prefix: + prefix = re.sub(r'[^\n\r]+\Z', '', prefix) + else: + assert start_pos[1] >= len(prefix), repr(prefix) + if start_pos[1] - len(prefix) == 0: + prefix = '' + yield PythonToken( + ENDMARKER, '', + start_pos, + prefix + ) + break + elif typ == NEWLINE and token.start_pos[0] >= until_line: + was_newline = True + elif was_newline: + was_newline = False + if len(indents) == initial_indentation_count: + # Check if the parser is actually in a valid suite state. 
+ if _suite_or_file_input_is_valid(self._pgen_grammar, stack): + yield PythonToken(ENDMARKER, '', token.start_pos, '') + break + + if typ == NAME and token.string in ('class', 'def'): + self._keyword_token_indents[token.start_pos] = list(indents) + + yield token + + +class _NodesTreeNode: + _ChildrenGroup = namedtuple( + '_ChildrenGroup', + 'prefix children line_offset last_line_offset_leaf') + + def __init__(self, tree_node, parent=None, indentation=0): + self.tree_node = tree_node + self._children_groups = [] + self.parent = parent + self._node_children = [] + self.indentation = indentation + + def finish(self): + children = [] + for prefix, children_part, line_offset, last_line_offset_leaf in self._children_groups: + first_leaf = _get_next_leaf_if_indentation( + children_part[0].get_first_leaf() + ) + + first_leaf.prefix = prefix + first_leaf.prefix + if line_offset != 0: + try: + _update_positions( + children_part, line_offset, last_line_offset_leaf) + except _PositionUpdatingFinished: + pass + children += children_part + self.tree_node.children = children + # Reset the parents + for node in children: + node.parent = self.tree_node + + for node_child in self._node_children: + node_child.finish() + + def add_child_node(self, child_node): + self._node_children.append(child_node) + + def add_tree_nodes(self, prefix, children, line_offset=0, + last_line_offset_leaf=None): + if last_line_offset_leaf is None: + last_line_offset_leaf = children[-1].get_last_leaf() + group = self._ChildrenGroup( + prefix, children, line_offset, last_line_offset_leaf + ) + self._children_groups.append(group) + + def get_last_line(self, suffix): + line = 0 + if self._children_groups: + children_group = self._children_groups[-1] + last_leaf = _get_previous_leaf_if_indentation( + children_group.last_line_offset_leaf + ) + + line = last_leaf.end_pos[0] + children_group.line_offset + + # Newlines end on the next line, which means that they would cover + # the next line. That line is not fully parsed at this point. + if _ends_with_newline(last_leaf, suffix): + line -= 1 + line += len(split_lines(suffix)) - 1 + + if suffix and not suffix.endswith('\n') and not suffix.endswith('\r'): + # This is the end of a file (that doesn't end with a newline). 
+
+ line += 1
+
+ if self._node_children:
+ return max(line, self._node_children[-1].get_last_line(suffix))
+ return line
+
+ def __repr__(self):
+ return '<%s: %s>' % (self.__class__.__name__, self.tree_node)
+
+
+class _NodesTree:
+ def __init__(self, module):
+ self._base_node = _NodesTreeNode(module)
+ self._working_stack = [self._base_node]
+ self._module = module
+ self._prefix_remainder = ''
+ self.prefix = ''
+ self.indents = [0]
+
+ @property
+ def parsed_until_line(self):
+ return self._working_stack[-1].get_last_line(self.prefix)
+
+ def _update_insertion_node(self, indentation):
+ for node in reversed(list(self._working_stack)):
+ if node.indentation < indentation or node is self._working_stack[0]:
+ return node
+ self._working_stack.pop()
+
+ def add_parsed_nodes(self, tree_nodes, keyword_token_indents):
+ old_prefix = self.prefix
+ tree_nodes = self._remove_endmarker(tree_nodes)
+ if not tree_nodes:
+ self.prefix = old_prefix + self.prefix
+ return
+
+ assert tree_nodes[0].type != 'newline'
+
+ node = self._update_insertion_node(tree_nodes[0].start_pos[1])
+ assert node.tree_node.type in ('suite', 'file_input')
+ node.add_tree_nodes(old_prefix, tree_nodes)
+ # tos = Top of stack
+ self._update_parsed_node_tos(tree_nodes[-1], keyword_token_indents)
+
+ def _update_parsed_node_tos(self, tree_node, keyword_token_indents):
+ if tree_node.type == 'suite':
+ def_leaf = tree_node.parent.children[0]
+ new_tos = _NodesTreeNode(
+ tree_node,
+ indentation=keyword_token_indents[def_leaf.start_pos][-1],
+ )
+ new_tos.add_tree_nodes('', list(tree_node.children))
+
+ self._working_stack[-1].add_child_node(new_tos)
+ self._working_stack.append(new_tos)
+
+ self._update_parsed_node_tos(tree_node.children[-1], keyword_token_indents)
+ elif _func_or_class_has_suite(tree_node):
+ self._update_parsed_node_tos(tree_node.children[-1], keyword_token_indents)
+
+ def _remove_endmarker(self, tree_nodes):
+ """
+ Helps clean up the tree nodes that get inserted.
+ """
+ last_leaf = tree_nodes[-1].get_last_leaf()
+ is_endmarker = last_leaf.type == 'endmarker'
+ self._prefix_remainder = ''
+ if is_endmarker:
+ prefix = last_leaf.prefix
+ separation = max(prefix.rfind('\n'), prefix.rfind('\r'))
+ if separation > -1:
+ # Remove the whitespace part of the prefix after a newline.
+ # That is not relevant if parentheses were opened. Always parse
+ # until the end of a line.
+ last_leaf.prefix, self._prefix_remainder = \
+ last_leaf.prefix[:separation + 1], last_leaf.prefix[separation + 1:]
+
+ self.prefix = ''
+
+ if is_endmarker:
+ self.prefix = last_leaf.prefix
+
+ tree_nodes = tree_nodes[:-1]
+ return tree_nodes
+
+ def _get_matching_indent_nodes(self, tree_nodes, is_new_suite):
+ # There might be a random dedent where we have to stop copying.
+ # Invalid indents are ok, because the parser handled that
+ # properly before. An invalid dedent can happen, because a few
+ # lines above there was an invalid indent.
+ node_iterator = iter(tree_nodes)
+ if is_new_suite:
+ yield next(node_iterator)
+
+ first_node = next(node_iterator)
+ indent = _get_indentation(first_node)
+ if not is_new_suite and indent not in self.indents:
+ return
+ yield first_node
+
+ for n in node_iterator:
+ if _get_indentation(n) != indent:
+ return
+ yield n
+
+ def copy_nodes(self, tree_nodes, until_line, line_offset):
+ """
+ Copies tree nodes from the old parser tree.
+
+ Returns the list of copied tree nodes.
+ """
+ if tree_nodes[0].type in ('error_leaf', 'error_node'):
+ # Avoid copying errors in the beginning.
Can lead to a lot of + # issues. + return [] + + indentation = _get_indentation(tree_nodes[0]) + old_working_stack = list(self._working_stack) + old_prefix = self.prefix + old_indents = self.indents + self.indents = [i for i in self.indents if i <= indentation] + + self._update_insertion_node(indentation) + + new_nodes, self._working_stack, self.prefix, added_indents = self._copy_nodes( + list(self._working_stack), + tree_nodes, + until_line, + line_offset, + self.prefix, + ) + if new_nodes: + self.indents += added_indents + else: + self._working_stack = old_working_stack + self.prefix = old_prefix + self.indents = old_indents + return new_nodes + + def _copy_nodes(self, working_stack, nodes, until_line, line_offset, + prefix='', is_nested=False): + new_nodes = [] + added_indents = [] + + nodes = list(self._get_matching_indent_nodes( + nodes, + is_new_suite=is_nested, + )) + + new_prefix = '' + for node in nodes: + if node.start_pos[0] > until_line: + break + + if node.type == 'endmarker': + break + + if node.type == 'error_leaf' and node.token_type in ('DEDENT', 'ERROR_DEDENT'): + break + # TODO this check might take a bit of time for large files. We + # might want to change this to do more intelligent guessing or + # binary search. + if _get_last_line(node) > until_line: + # We can split up functions and classes later. + if _func_or_class_has_suite(node): + new_nodes.append(node) + break + try: + c = node.children + except AttributeError: + pass + else: + # This case basically appears with error recovery of one line + # suites like `def foo(): bar.-`. In this case we might not + # include a newline in the statement and we need to take care + # of that. + n = node + if n.type == 'decorated': + n = n.children[-1] + if n.type in ('async_funcdef', 'async_stmt'): + n = n.children[-1] + if n.type in ('classdef', 'funcdef'): + suite_node = n.children[-1] + else: + suite_node = c[-1] + + if suite_node.type in ('error_leaf', 'error_node'): + break + + new_nodes.append(node) + + # Pop error nodes at the end from the list + if new_nodes: + while new_nodes: + last_node = new_nodes[-1] + if (last_node.type in ('error_leaf', 'error_node') + or _is_flow_node(new_nodes[-1])): + # Error leafs/nodes don't have a defined start/end. Error + # nodes might not end with a newline (e.g. if there's an + # open `(`). Therefore ignore all of them unless they are + # succeeded with valid parser state. + # If we copy flows at the end, they might be continued + # after the copy limit (in the new parser). + # In this while loop we try to remove until we find a newline. + new_prefix = '' + new_nodes.pop() + while new_nodes: + last_node = new_nodes[-1] + if last_node.get_last_leaf().type == 'newline': + break + new_nodes.pop() + continue + if len(new_nodes) > 1 and new_nodes[-2].type == 'error_node': + # The problem here is that Parso error recovery sometimes + # influences nodes before this node. + # Since the new last node is an error node this will get + # cleaned up in the next while iteration. 
+ new_nodes.pop() + continue + break + + if not new_nodes: + return [], working_stack, prefix, added_indents + + tos = working_stack[-1] + last_node = new_nodes[-1] + had_valid_suite_last = False + # Pop incomplete suites from the list + if _func_or_class_has_suite(last_node): + suite = last_node + while suite.type != 'suite': + suite = suite.children[-1] + + indent = _get_suite_indentation(suite) + added_indents.append(indent) + + suite_tos = _NodesTreeNode(suite, indentation=_get_indentation(last_node)) + # Don't need to pass line_offset here, it's already done by the + # parent. + suite_nodes, new_working_stack, new_prefix, ai = self._copy_nodes( + working_stack + [suite_tos], suite.children, until_line, line_offset, + is_nested=True, + ) + added_indents += ai + if len(suite_nodes) < 2: + # A suite only with newline is not valid. + new_nodes.pop() + new_prefix = '' + else: + assert new_nodes + tos.add_child_node(suite_tos) + working_stack = new_working_stack + had_valid_suite_last = True + + if new_nodes: + if not _ends_with_newline(new_nodes[-1].get_last_leaf()) and not had_valid_suite_last: + p = new_nodes[-1].get_next_leaf().prefix + # We are not allowed to remove the newline at the end of the + # line, otherwise it's going to be missing. This happens e.g. + # if a bracket is around before that moves newlines to + # prefixes. + new_prefix = split_lines(p, keepends=True)[0] + + if had_valid_suite_last: + last = new_nodes[-1] + if last.type == 'decorated': + last = last.children[-1] + if last.type in ('async_funcdef', 'async_stmt'): + last = last.children[-1] + last_line_offset_leaf = last.children[-2].get_last_leaf() + assert last_line_offset_leaf == ':' + else: + last_line_offset_leaf = new_nodes[-1].get_last_leaf() + tos.add_tree_nodes( + prefix, new_nodes, line_offset, last_line_offset_leaf, + ) + prefix = new_prefix + self._prefix_remainder = '' + + return new_nodes, working_stack, prefix, added_indents + + def close(self): + self._base_node.finish() + + # Add an endmarker. + try: + last_leaf = self._module.get_last_leaf() + except IndexError: + end_pos = [1, 0] + else: + last_leaf = _skip_dedent_error_leaves(last_leaf) + end_pos = list(last_leaf.end_pos) + lines = split_lines(self.prefix) + assert len(lines) > 0 + if len(lines) == 1: + if lines[0].startswith(BOM_UTF8_STRING) and end_pos == [1, 0]: + end_pos[1] -= 1 + end_pos[1] += len(lines[0]) + else: + end_pos[0] += len(lines) - 1 + end_pos[1] = len(lines[-1]) + + endmarker = EndMarker('', tuple(end_pos), self.prefix + self._prefix_remainder) + endmarker.parent = self._module + self._module.children.append(endmarker) diff --git a/myenv/lib/python3.10/site-packages/parso/python/errors.py b/myenv/lib/python3.10/site-packages/parso/python/errors.py new file mode 100644 index 000000000..5da046ab0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/errors.py @@ -0,0 +1,1295 @@ +# -*- coding: utf-8 -*- +import codecs +import warnings +import re +from contextlib import contextmanager + +from parso.normalizer import Normalizer, NormalizerConfig, Issue, Rule +from parso.python.tokenize import _get_token_collection + +_BLOCK_STMTS = ('if_stmt', 'while_stmt', 'for_stmt', 'try_stmt', 'with_stmt') +_STAR_EXPR_PARENTS = ('testlist_star_expr', 'testlist_comp', 'exprlist') +# This is the maximal block size given by python. 
+_MAX_BLOCK_SIZE = 20 +_MAX_INDENT_COUNT = 100 +ALLOWED_FUTURES = ( + 'nested_scopes', 'generators', 'division', 'absolute_import', + 'with_statement', 'print_function', 'unicode_literals', 'generator_stop', +) +_COMP_FOR_TYPES = ('comp_for', 'sync_comp_for') + + +def _get_rhs_name(node, version): + type_ = node.type + if type_ == "lambdef": + return "lambda" + elif type_ == "atom": + comprehension = _get_comprehension_type(node) + first, second = node.children[:2] + if comprehension is not None: + return comprehension + elif second.type == "dictorsetmaker": + if version < (3, 8): + return "literal" + else: + if second.children[1] == ":" or second.children[0] == "**": + return "dict display" + else: + return "set display" + elif ( + first == "(" + and (second == ")" + or (len(node.children) == 3 and node.children[1].type == "testlist_comp")) + ): + return "tuple" + elif first == "(": + return _get_rhs_name(_remove_parens(node), version=version) + elif first == "[": + return "list" + elif first == "{" and second == "}": + return "dict display" + elif first == "{" and len(node.children) > 2: + return "set display" + elif type_ == "keyword": + if "yield" in node.value: + return "yield expression" + if version < (3, 8): + return "keyword" + else: + return str(node.value) + elif type_ == "operator" and node.value == "...": + return "Ellipsis" + elif type_ == "comparison": + return "comparison" + elif type_ in ("string", "number", "strings"): + return "literal" + elif type_ == "yield_expr": + return "yield expression" + elif type_ == "test": + return "conditional expression" + elif type_ in ("atom_expr", "power"): + if node.children[0] == "await": + return "await expression" + elif node.children[-1].type == "trailer": + trailer = node.children[-1] + if trailer.children[0] == "(": + return "function call" + elif trailer.children[0] == "[": + return "subscript" + elif trailer.children[0] == ".": + return "attribute" + elif ( + ("expr" in type_ and "star_expr" not in type_) # is a substring + or "_test" in type_ + or type_ in ("term", "factor") + ): + return "operator" + elif type_ == "star_expr": + return "starred" + elif type_ == "testlist_star_expr": + return "tuple" + elif type_ == "fstring": + return "f-string expression" + return type_ # shouldn't reach here + + +def _iter_stmts(scope): + """ + Iterates over all statements and splits up simple_stmt. + """ + for child in scope.children: + if child.type == 'simple_stmt': + for child2 in child.children: + if child2.type == 'newline' or child2 == ';': + continue + yield child2 + else: + yield child + + +def _get_comprehension_type(atom): + first, second = atom.children[:2] + if second.type == 'testlist_comp' and second.children[1].type in _COMP_FOR_TYPES: + if first == '[': + return 'list comprehension' + else: + return 'generator expression' + elif second.type == 'dictorsetmaker' and second.children[-1].type in _COMP_FOR_TYPES: + if second.children[1] == ':': + return 'dict comprehension' + else: + return 'set comprehension' + return None + + +def _is_future_import(import_from): + # It looks like a __future__ import that is relative is still a future + # import. That feels kind of odd, but whatever. + # if import_from.level != 0: + # return False + from_names = import_from.get_from_names() + return [n.value for n in from_names] == ['__future__'] + + +def _remove_parens(atom): + """ + Returns the inner part of an expression like `(foo)`. Also removes nested + parens. 
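
A toy standalone version of the same unwrapping, using a tiny stand-in class
instead of parso nodes (illustrative sketch, not the function below)::

    class Atom:
        def __init__(self, children):
            self.children = children

    def remove_parens(node):
        # Recursively unwraps ('(', inner, ')') atoms down to `inner`.
        children = getattr(node, 'children', None)
        if children is not None and len(children) == 3 and children[0] == '(':
            return remove_parens(children[1])
        return node

    inner = Atom(['(', 'foo', ')'])
    assert remove_parens(Atom(['(', inner, ')'])) == 'foo'
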
+ """ + try: + children = atom.children + except AttributeError: + pass + else: + if len(children) == 3 and children[0] == '(': + return _remove_parens(atom.children[1]) + return atom + + +def _skip_parens_bottom_up(node): + """ + Returns an ancestor node of an expression, skipping all levels of parens + bottom-up. + """ + while node.parent is not None: + node = node.parent + if node.type != 'atom' or node.children[0] != '(': + return node + return None + + +def _iter_params(parent_node): + return (n for n in parent_node.children if n.type == 'param' or n.type == 'operator') + + +def _is_future_import_first(import_from): + """ + Checks if the import is the first statement of a file. + """ + found_docstring = False + for stmt in _iter_stmts(import_from.get_root_node()): + if stmt.type == 'string' and not found_docstring: + continue + found_docstring = True + + if stmt == import_from: + return True + if stmt.type == 'import_from' and _is_future_import(stmt): + continue + return False + + +def _iter_definition_exprs_from_lists(exprlist): + def check_expr(child): + if child.type == 'atom': + if child.children[0] == '(': + testlist_comp = child.children[1] + if testlist_comp.type == 'testlist_comp': + yield from _iter_definition_exprs_from_lists(testlist_comp) + return + else: + # It's a paren that doesn't do anything, like 1 + (1) + yield from check_expr(testlist_comp) + return + elif child.children[0] == '[': + yield testlist_comp + return + yield child + + if exprlist.type in _STAR_EXPR_PARENTS: + for child in exprlist.children[::2]: + yield from check_expr(child) + else: + yield from check_expr(exprlist) + + +def _get_expr_stmt_definition_exprs(expr_stmt): + exprs = [] + for list_ in expr_stmt.children[:-2:2]: + if list_.type in ('testlist_star_expr', 'testlist'): + exprs += _iter_definition_exprs_from_lists(list_) + else: + exprs.append(list_) + return exprs + + +def _get_for_stmt_definition_exprs(for_stmt): + exprlist = for_stmt.children[1] + return list(_iter_definition_exprs_from_lists(exprlist)) + + +def _is_argument_comprehension(argument): + return argument.children[1].type in _COMP_FOR_TYPES + + +def _any_fstring_error(version, node): + if version < (3, 9) or node is None: + return False + if node.type == "error_node": + return any(child.type == "fstring_start" for child in node.children) + elif node.type == "fstring": + return True + else: + return node.search_ancestor("fstring") + + +class _Context: + def __init__(self, node, add_syntax_error, parent_context=None): + self.node = node + self.blocks = [] + self.parent_context = parent_context + self._used_name_dict = {} + self._global_names = [] + self._local_params_names = [] + self._nonlocal_names = [] + self._nonlocal_names_in_subscopes = [] + self._add_syntax_error = add_syntax_error + + def is_async_funcdef(self): + # Stupidly enough async funcdefs can have two different forms, + # depending if a decorator is used or not. + return self.is_function() \ + and self.node.parent.type in ('async_funcdef', 'async_stmt') + + def is_function(self): + return self.node.type == 'funcdef' + + def add_name(self, name): + parent_type = name.parent.type + if parent_type == 'trailer': + # We are only interested in first level names. 
+
+ return
+
+ if parent_type == 'global_stmt':
+ self._global_names.append(name)
+ elif parent_type == 'nonlocal_stmt':
+ self._nonlocal_names.append(name)
+ elif parent_type == 'funcdef':
+ self._local_params_names.extend(
+ [param.name.value for param in name.parent.get_params()]
+ )
+ else:
+ self._used_name_dict.setdefault(name.value, []).append(name)
+
+ def finalize(self):
+ """
+ Returns a list of nonlocal names that need to be part of that scope.
+ """
+ self._analyze_names(self._global_names, 'global')
+ self._analyze_names(self._nonlocal_names, 'nonlocal')
+
+ global_name_strs = {n.value: n for n in self._global_names}
+ for nonlocal_name in self._nonlocal_names:
+ try:
+ global_name = global_name_strs[nonlocal_name.value]
+ except KeyError:
+ continue
+
+ message = "name '%s' is nonlocal and global" % global_name.value
+ if global_name.start_pos < nonlocal_name.start_pos:
+ error_name = global_name
+ else:
+ error_name = nonlocal_name
+ self._add_syntax_error(error_name, message)
+
+ nonlocals_not_handled = []
+ for nonlocal_name in self._nonlocal_names_in_subscopes:
+ search = nonlocal_name.value
+ if search in self._local_params_names:
+ continue
+ if search in global_name_strs or self.parent_context is None:
+ message = "no binding for nonlocal '%s' found" % nonlocal_name.value
+ self._add_syntax_error(nonlocal_name, message)
+ elif not self.is_function() or \
+ nonlocal_name.value not in self._used_name_dict:
+ nonlocals_not_handled.append(nonlocal_name)
+ return self._nonlocal_names + nonlocals_not_handled
+
+ def _analyze_names(self, globals_or_nonlocals, type_):
+ def raise_(message):
+ self._add_syntax_error(base_name, message % (base_name.value, type_))
+
+ params = []
+ if self.node.type == 'funcdef':
+ params = self.node.get_params()
+
+ for base_name in globals_or_nonlocals:
+ found_global_or_nonlocal = False
+ # Somehow Python does it the reversed way.
+ for name in reversed(self._used_name_dict.get(base_name.value, [])):
+ if name.start_pos > base_name.start_pos:
+ # All following names don't have to be checked.
+ found_global_or_nonlocal = True
+
+ parent = name.parent
+ if parent.type == 'param' and parent.name == name:
+ # Skip those here, these definitions belong to the next
+ # scope.
+ continue
+
+ if name.is_definition():
+ if parent.type == 'expr_stmt' \
+ and parent.children[1].type == 'annassign':
+ if found_global_or_nonlocal:
+ # If it's after the global the error seems to be
+ # placed there.
+ base_name = name
+ raise_("annotated name '%s' can't be %s")
+ break
+ else:
+ message = "name '%s' is assigned to before %s declaration"
+ else:
+ message = "name '%s' is used prior to %s declaration"
+
+ if not found_global_or_nonlocal:
+ raise_(message)
+ # Only add an error for the first occurrence.
+ break
+
+ for param in params:
+ if param.name.value == base_name.value:
+ raise_("name '%s' is parameter and %s")
+
+ @contextmanager
+ def add_block(self, node):
+ self.blocks.append(node)
+ yield
+ self.blocks.pop()
+
+ def add_context(self, node):
+ return _Context(node, self._add_syntax_error, parent_context=self)
+
+ def close_child_context(self, child_context):
+ self._nonlocal_names_in_subscopes += child_context.finalize()
+
+
+class ErrorFinder(Normalizer):
+ """
+ Searches for errors in the syntax tree.
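
In practice this normalizer is reached through parso's public API. A usage
sketch (it assumes parso is installed; the parsed snippet is deliberately
invalid so that issues are reported)::

    import parso

    grammar = parso.load_grammar()
    module = grammar.parse('def f():\n    global x\n    nonlocal x\n')
    for issue in grammar.iter_errors(module):
        # Each issue carries a position, a numeric code and a message.
        print(issue.start_pos, issue.code, issue.message)
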
+ """ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._error_dict = {} + self.version = self.grammar.version_info + + def initialize(self, node): + def create_context(node): + if node is None: + return None + + parent_context = create_context(node.parent) + if node.type in ('classdef', 'funcdef', 'file_input'): + return _Context(node, self._add_syntax_error, parent_context) + return parent_context + + self.context = create_context(node) or _Context(node, self._add_syntax_error) + self._indentation_count = 0 + + def visit(self, node): + if node.type == 'error_node': + with self.visit_node(node): + # Don't need to investigate the inners of an error node. We + # might find errors in there that should be ignored, because + # the error node itself already shows that there's an issue. + return '' + return super().visit(node) + + @contextmanager + def visit_node(self, node): + self._check_type_rules(node) + + if node.type in _BLOCK_STMTS: + with self.context.add_block(node): + if len(self.context.blocks) == _MAX_BLOCK_SIZE: + self._add_syntax_error(node, "too many statically nested blocks") + yield + return + elif node.type == 'suite': + self._indentation_count += 1 + if self._indentation_count == _MAX_INDENT_COUNT: + self._add_indentation_error(node.children[1], "too many levels of indentation") + + yield + + if node.type == 'suite': + self._indentation_count -= 1 + elif node.type in ('classdef', 'funcdef'): + context = self.context + self.context = context.parent_context + self.context.close_child_context(context) + + def visit_leaf(self, leaf): + if leaf.type == 'error_leaf': + if leaf.token_type in ('INDENT', 'ERROR_DEDENT'): + # Indents/Dedents itself never have a prefix. They are just + # "pseudo" tokens that get removed by the syntax tree later. + # Therefore in case of an error we also have to check for this. + spacing = list(leaf.get_next_leaf()._split_prefix())[-1] + if leaf.token_type == 'INDENT': + message = 'unexpected indent' + else: + message = 'unindent does not match any outer indentation level' + self._add_indentation_error(spacing, message) + else: + if leaf.value.startswith('\\'): + message = 'unexpected character after line continuation character' + else: + match = re.match('\\w{,2}("{1,3}|\'{1,3})', leaf.value) + if match is None: + message = 'invalid syntax' + if ( + self.version >= (3, 9) + and leaf.value in _get_token_collection( + self.version + ).always_break_tokens + ): + message = "f-string: " + message + else: + if len(match.group(1)) == 1: + message = 'EOL while scanning string literal' + else: + message = 'EOF while scanning triple-quoted string literal' + self._add_syntax_error(leaf, message) + return '' + elif leaf.value == ':': + parent = leaf.parent + if parent.type in ('classdef', 'funcdef'): + self.context = self.context.add_context(parent) + + # The rest is rule based. + return super().visit_leaf(leaf) + + def _add_indentation_error(self, spacing, message): + self.add_issue(spacing, 903, "IndentationError: " + message) + + def _add_syntax_error(self, node, message): + self.add_issue(node, 901, "SyntaxError: " + message) + + def add_issue(self, node, code, message): + # Overwrite the default behavior. + # Check if the issues are on the same line. 
+ line = node.start_pos[0] + args = (code, message, node) + self._error_dict.setdefault(line, args) + + def finalize(self): + self.context.finalize() + + for code, message, node in self._error_dict.values(): + self.issues.append(Issue(node, code, message)) + + +class IndentationRule(Rule): + code = 903 + + def _get_message(self, message, node): + message = super()._get_message(message, node) + return "IndentationError: " + message + + +@ErrorFinder.register_rule(type='error_node') +class _ExpectIndentedBlock(IndentationRule): + message = 'expected an indented block' + + def get_node(self, node): + leaf = node.get_next_leaf() + return list(leaf._split_prefix())[-1] + + def is_issue(self, node): + # This is the beginning of a suite that is not indented. + return node.children[-1].type == 'newline' + + +class ErrorFinderConfig(NormalizerConfig): + normalizer_class = ErrorFinder + + +class SyntaxRule(Rule): + code = 901 + + def _get_message(self, message, node): + message = super()._get_message(message, node) + if ( + "f-string" not in message + and _any_fstring_error(self._normalizer.version, node) + ): + message = "f-string: " + message + return "SyntaxError: " + message + + +@ErrorFinder.register_rule(type='error_node') +class _InvalidSyntaxRule(SyntaxRule): + message = "invalid syntax" + fstring_message = "f-string: invalid syntax" + + def get_node(self, node): + return node.get_next_leaf() + + def is_issue(self, node): + error = node.get_next_leaf().type != 'error_leaf' + if ( + error + and _any_fstring_error(self._normalizer.version, node) + ): + self.add_issue(node, message=self.fstring_message) + else: + # Error leafs will be added later as an error. + return error + + +@ErrorFinder.register_rule(value='await') +class _AwaitOutsideAsync(SyntaxRule): + message = "'await' outside async function" + + def is_issue(self, leaf): + return not self._normalizer.context.is_async_funcdef() + + def get_error_node(self, node): + # Return the whole await statement. + return node.parent + + +@ErrorFinder.register_rule(value='break') +class _BreakOutsideLoop(SyntaxRule): + message = "'break' outside loop" + + def is_issue(self, leaf): + in_loop = False + for block in self._normalizer.context.blocks: + if block.type in ('for_stmt', 'while_stmt'): + in_loop = True + return not in_loop + + +@ErrorFinder.register_rule(value='continue') +class _ContinueChecks(SyntaxRule): + message = "'continue' not properly in loop" + message_in_finally = "'continue' not supported inside 'finally' clause" + + def is_issue(self, leaf): + in_loop = False + for block in self._normalizer.context.blocks: + if block.type in ('for_stmt', 'while_stmt'): + in_loop = True + if block.type == 'try_stmt': + last_block = block.children[-3] + if ( + last_block == "finally" + and leaf.start_pos > last_block.start_pos + and self._normalizer.version < (3, 8) + ): + self.add_issue(leaf, message=self.message_in_finally) + return False # Error already added + if not in_loop: + return True + + +@ErrorFinder.register_rule(value='from') +class _YieldFromCheck(SyntaxRule): + message = "'yield from' inside async function" + + def get_node(self, leaf): + return leaf.parent.parent # This is the actual yield statement. 
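
These rule classes all follow one small protocol: ``is_issue()`` decides
whether to report, ``get_node()`` picks the node the issue is attached to,
and a decorator registers the class for a leaf value or node type. A
simplified stand-in for that registration machinery (illustrative sketch,
not parso's exact internals)::

    class MiniFinder:
        _rules = {}

        @classmethod
        def register_rule(cls, *, value=None, type=None):
            # Maps a leaf value (like 'break') or a node type (like
            # 'name') to the rule class that checks it.
            def decorator(rule_cls):
                cls._rules[value or type] = rule_cls
                return rule_cls
            return decorator

    @MiniFinder.register_rule(value='break')
    class BreakRule:
        message = "'break' outside loop"

    assert MiniFinder._rules['break'] is BreakRule
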
+ + def is_issue(self, leaf): + return leaf.parent.type == 'yield_arg' \ + and self._normalizer.context.is_async_funcdef() + + +@ErrorFinder.register_rule(type='name') +class _NameChecks(SyntaxRule): + message = 'cannot assign to __debug__' + message_none = 'cannot assign to None' + + def is_issue(self, leaf): + self._normalizer.context.add_name(leaf) + + if leaf.value == '__debug__' and leaf.is_definition(): + return True + + +@ErrorFinder.register_rule(type='string') +class _StringChecks(SyntaxRule): + message = "bytes can only contain ASCII literal characters." + + def is_issue(self, leaf): + string_prefix = leaf.string_prefix.lower() + if 'b' in string_prefix \ + and any(c for c in leaf.value if ord(c) > 127): + # b'ä' + return True + + if 'r' not in string_prefix: + # Raw strings don't need to be checked if they have proper + # escaping. + + payload = leaf._get_payload() + if 'b' in string_prefix: + payload = payload.encode('utf-8') + func = codecs.escape_decode + else: + func = codecs.unicode_escape_decode + + try: + with warnings.catch_warnings(): + # The warnings from parsing strings are not relevant. + warnings.filterwarnings('ignore') + func(payload) + except UnicodeDecodeError as e: + self.add_issue(leaf, message='(unicode error) ' + str(e)) + except ValueError as e: + self.add_issue(leaf, message='(value error) ' + str(e)) + + +@ErrorFinder.register_rule(value='*') +class _StarCheck(SyntaxRule): + message = "named arguments must follow bare *" + + def is_issue(self, leaf): + params = leaf.parent + if params.type == 'parameters' and params: + after = params.children[params.children.index(leaf) + 1:] + after = [child for child in after + if child not in (',', ')') and not child.star_count] + return len(after) == 0 + + +@ErrorFinder.register_rule(value='**') +class _StarStarCheck(SyntaxRule): + # e.g. {**{} for a in [1]} + # TODO this should probably get a better end_pos including + # the next sibling of leaf. + message = "dict unpacking cannot be used in dict comprehension" + + def is_issue(self, leaf): + if leaf.parent.type == 'dictorsetmaker': + comp_for = leaf.get_next_sibling().get_next_sibling() + return comp_for is not None and comp_for.type in _COMP_FOR_TYPES + + +@ErrorFinder.register_rule(value='yield') +@ErrorFinder.register_rule(value='return') +class _ReturnAndYieldChecks(SyntaxRule): + message = "'return' with value in async generator" + message_async_yield = "'yield' inside async function" + + def get_node(self, leaf): + return leaf.parent + + def is_issue(self, leaf): + if self._normalizer.context.node.type != 'funcdef': + self.add_issue(self.get_node(leaf), message="'%s' outside function" % leaf.value) + elif self._normalizer.context.is_async_funcdef() \ + and any(self._normalizer.context.node.iter_yield_exprs()): + if leaf.value == 'return' and leaf.parent.type == 'return_stmt': + return True + + +@ErrorFinder.register_rule(type='strings') +class _BytesAndStringMix(SyntaxRule): + # e.g. 's' b'' + message = "cannot mix bytes and nonbytes literals" + + def _is_bytes_literal(self, string): + if string.type == 'fstring': + return False + return 'b' in string.string_prefix.lower() + + def is_issue(self, node): + first = node.children[0] + first_is_bytes = self._is_bytes_literal(first) + for string in node.children[1:]: + if first_is_bytes != self._is_bytes_literal(string): + return True + + +@ErrorFinder.register_rule(type='import_as_names') +class _TrailingImportComma(SyntaxRule): + # e.g. 
from foo import a, + message = "trailing comma not allowed without surrounding parentheses" + + def is_issue(self, node): + if node.children[-1] == ',' and node.parent.children[-1] != ')': + return True + + +@ErrorFinder.register_rule(type='import_from') +class _ImportStarInFunction(SyntaxRule): + message = "import * only allowed at module level" + + def is_issue(self, node): + return node.is_star_import() and self._normalizer.context.parent_context is not None + + +@ErrorFinder.register_rule(type='import_from') +class _FutureImportRule(SyntaxRule): + message = "from __future__ imports must occur at the beginning of the file" + + def is_issue(self, node): + if _is_future_import(node): + if not _is_future_import_first(node): + return True + + for from_name, future_name in node.get_paths(): + name = future_name.value + allowed_futures = list(ALLOWED_FUTURES) + if self._normalizer.version >= (3, 7): + allowed_futures.append('annotations') + if name == 'braces': + self.add_issue(node, message="not a chance") + elif name == 'barry_as_FLUFL': + m = "Seriously I'm not implementing this :) ~ Dave" + self.add_issue(node, message=m) + elif name not in allowed_futures: + message = "future feature %s is not defined" % name + self.add_issue(node, message=message) + + +@ErrorFinder.register_rule(type='star_expr') +class _StarExprRule(SyntaxRule): + message_iterable_unpacking = "iterable unpacking cannot be used in comprehension" + + def is_issue(self, node): + def check_delete_starred(node): + while node.parent is not None: + node = node.parent + if node.type == 'del_stmt': + return True + if node.type not in (*_STAR_EXPR_PARENTS, 'atom'): + return False + return False + + if self._normalizer.version >= (3, 9): + ancestor = node.parent + else: + ancestor = _skip_parens_bottom_up(node) + # starred expression not in tuple/list/set + if ancestor.type not in (*_STAR_EXPR_PARENTS, 'dictorsetmaker') \ + and not (ancestor.type == 'atom' and ancestor.children[0] != '('): + self.add_issue(node, message="can't use starred expression here") + return + + if check_delete_starred(node): + if self._normalizer.version >= (3, 9): + self.add_issue(node, message="cannot delete starred") + else: + self.add_issue(node, message="can't use starred expression here") + return + + if node.parent.type == 'testlist_comp': + # [*[] for a in [1]] + if node.parent.children[1].type in _COMP_FOR_TYPES: + self.add_issue(node, message=self.message_iterable_unpacking) + + +@ErrorFinder.register_rule(types=_STAR_EXPR_PARENTS) +class _StarExprParentRule(SyntaxRule): + def is_issue(self, node): + def is_definition(node, ancestor): + if ancestor is None: + return False + + type_ = ancestor.type + if type_ == 'trailer': + return False + + if type_ == 'expr_stmt': + return node.start_pos < ancestor.children[-1].start_pos + + return is_definition(node, ancestor.parent) + + if is_definition(node, node.parent): + args = [c for c in node.children if c != ','] + starred = [c for c in args if c.type == 'star_expr'] + if len(starred) > 1: + if self._normalizer.version < (3, 9): + message = "two starred expressions in assignment" + else: + message = "multiple starred expressions in assignment" + self.add_issue(starred[1], message=message) + elif starred: + count = args.index(starred[0]) + if count >= 256: + message = "too many expressions in star-unpacking assignment" + self.add_issue(starred[0], message=message) + + +@ErrorFinder.register_rule(type='annassign') +class _AnnotatorRule(SyntaxRule): + # True: int + # {}: float + message = "illegal target 
for annotation" + + def get_node(self, node): + return node.parent + + def is_issue(self, node): + type_ = None + lhs = node.parent.children[0] + lhs = _remove_parens(lhs) + try: + children = lhs.children + except AttributeError: + pass + else: + if ',' in children or lhs.type == 'atom' and children[0] == '(': + type_ = 'tuple' + elif lhs.type == 'atom' and children[0] == '[': + type_ = 'list' + trailer = children[-1] + + if type_ is None: + if not (lhs.type == 'name' + # subscript/attributes are allowed + or lhs.type in ('atom_expr', 'power') + and trailer.type == 'trailer' + and trailer.children[0] != '('): + return True + else: + # x, y: str + message = "only single target (not %s) can be annotated" + self.add_issue(lhs.parent, message=message % type_) + + +@ErrorFinder.register_rule(type='argument') +class _ArgumentRule(SyntaxRule): + def is_issue(self, node): + first = node.children[0] + if self._normalizer.version < (3, 8): + # a((b)=c) is valid in <3.8 + first = _remove_parens(first) + if node.children[1] == '=' and first.type != 'name': + if first.type == 'lambdef': + # f(lambda: 1=1) + if self._normalizer.version < (3, 8): + message = "lambda cannot contain assignment" + else: + message = 'expression cannot contain assignment, perhaps you meant "=="?' + else: + # f(+x=1) + if self._normalizer.version < (3, 8): + message = "keyword can't be an expression" + else: + message = 'expression cannot contain assignment, perhaps you meant "=="?' + self.add_issue(first, message=message) + + if _is_argument_comprehension(node) and node.parent.type == 'classdef': + self.add_issue(node, message='invalid syntax') + + +@ErrorFinder.register_rule(type='nonlocal_stmt') +class _NonlocalModuleLevelRule(SyntaxRule): + message = "nonlocal declaration not allowed at module level" + + def is_issue(self, node): + return self._normalizer.context.parent_context is None + + +@ErrorFinder.register_rule(type='arglist') +class _ArglistRule(SyntaxRule): + @property + def message(self): + if self._normalizer.version < (3, 7): + return "Generator expression must be parenthesized if not sole argument" + else: + return "Generator expression must be parenthesized" + + def is_issue(self, node): + arg_set = set() + kw_only = False + kw_unpacking_only = False + for argument in node.children: + if argument == ',': + continue + + if argument.type == 'argument': + first = argument.children[0] + if _is_argument_comprehension(argument) and len(node.children) >= 2: + # a(a, b for b in c) + return True + + if first in ('*', '**'): + if first == '*': + if kw_unpacking_only: + # foo(**kwargs, *args) + message = "iterable argument unpacking " \ + "follows keyword argument unpacking" + self.add_issue(argument, message=message) + else: + kw_unpacking_only = True + else: # Is a keyword argument. 
+ kw_only = True + if first.type == 'name': + if first.value in arg_set: + # f(x=1, x=2) + message = "keyword argument repeated" + if self._normalizer.version >= (3, 9): + message += ": {}".format(first.value) + self.add_issue(first, message=message) + else: + arg_set.add(first.value) + else: + if kw_unpacking_only: + # f(**x, y) + message = "positional argument follows keyword argument unpacking" + self.add_issue(argument, message=message) + elif kw_only: + # f(x=2, y) + message = "positional argument follows keyword argument" + self.add_issue(argument, message=message) + + +@ErrorFinder.register_rule(type='parameters') +@ErrorFinder.register_rule(type='lambdef') +class _ParameterRule(SyntaxRule): + # def f(x=3, y): pass + message = "non-default argument follows default argument" + + def is_issue(self, node): + param_names = set() + default_only = False + star_seen = False + for p in _iter_params(node): + if p.type == 'operator': + if p.value == '*': + star_seen = True + default_only = False + continue + + if p.name.value in param_names: + message = "duplicate argument '%s' in function definition" + self.add_issue(p.name, message=message % p.name.value) + param_names.add(p.name.value) + + if not star_seen: + if p.default is None and not p.star_count: + if default_only: + return True + elif p.star_count: + star_seen = True + default_only = False + else: + default_only = True + + +@ErrorFinder.register_rule(type='try_stmt') +class _TryStmtRule(SyntaxRule): + message = "default 'except:' must be last" + + def is_issue(self, try_stmt): + default_except = None + for except_clause in try_stmt.children[3::3]: + if except_clause in ('else', 'finally'): + break + if except_clause == 'except': + default_except = except_clause + elif default_except is not None: + self.add_issue(default_except, message=self.message) + + +@ErrorFinder.register_rule(type='fstring') +class _FStringRule(SyntaxRule): + _fstring_grammar = None + message_expr = "f-string expression part cannot include a backslash" + message_nested = "f-string: expressions nested too deeply" + message_conversion = "f-string: invalid conversion character: expected 's', 'r', or 'a'" + + def _check_format_spec(self, format_spec, depth): + self._check_fstring_contents(format_spec.children[1:], depth) + + def _check_fstring_expr(self, fstring_expr, depth): + if depth >= 2: + self.add_issue(fstring_expr, message=self.message_nested) + + expr = fstring_expr.children[1] + if '\\' in expr.get_code(): + self.add_issue(expr, message=self.message_expr) + + children_2 = fstring_expr.children[2] + if children_2.type == 'operator' and children_2.value == '=': + conversion = fstring_expr.children[3] + else: + conversion = children_2 + if conversion.type == 'fstring_conversion': + name = conversion.children[1] + if name.value not in ('s', 'r', 'a'): + self.add_issue(name, message=self.message_conversion) + + format_spec = fstring_expr.children[-2] + if format_spec.type == 'fstring_format_spec': + self._check_format_spec(format_spec, depth + 1) + + def is_issue(self, fstring): + self._check_fstring_contents(fstring.children[1:-1]) + + def _check_fstring_contents(self, children, depth=0): + for fstring_content in children: + if fstring_content.type == 'fstring_expr': + self._check_fstring_expr(fstring_content, depth) + + +class _CheckAssignmentRule(SyntaxRule): + def _check_assignment(self, node, is_deletion=False, is_namedexpr=False, is_aug_assign=False): + error = None + type_ = node.type + if type_ == 'lambdef': + error = 'lambda' + elif type_ == 'atom': + 
first, second = node.children[:2] + error = _get_comprehension_type(node) + if error is None: + if second.type == 'dictorsetmaker': + if self._normalizer.version < (3, 8): + error = 'literal' + else: + if second.children[1] == ':': + error = 'dict display' + else: + error = 'set display' + elif first == "{" and second == "}": + if self._normalizer.version < (3, 8): + error = 'literal' + else: + error = "dict display" + elif first == "{" and len(node.children) > 2: + if self._normalizer.version < (3, 8): + error = 'literal' + else: + error = "set display" + elif first in ('(', '['): + if second.type == 'yield_expr': + error = 'yield expression' + elif second.type == 'testlist_comp': + # ([a, b] := [1, 2]) + # ((a, b) := [1, 2]) + if is_namedexpr: + if first == '(': + error = 'tuple' + elif first == '[': + error = 'list' + + # This is not a comprehension, they were handled + # further above. + for child in second.children[::2]: + self._check_assignment(child, is_deletion, is_namedexpr, is_aug_assign) + else: # Everything handled, must be useless brackets. + self._check_assignment(second, is_deletion, is_namedexpr, is_aug_assign) + elif type_ == 'keyword': + if node.value == "yield": + error = "yield expression" + elif self._normalizer.version < (3, 8): + error = 'keyword' + else: + error = str(node.value) + elif type_ == 'operator': + if node.value == '...': + error = 'Ellipsis' + elif type_ == 'comparison': + error = 'comparison' + elif type_ in ('string', 'number', 'strings'): + error = 'literal' + elif type_ == 'yield_expr': + # This one seems to be a slightly different warning in Python. + message = 'assignment to yield expression not possible' + self.add_issue(node, message=message) + elif type_ == 'test': + error = 'conditional expression' + elif type_ in ('atom_expr', 'power'): + if node.children[0] == 'await': + error = 'await expression' + elif node.children[-2] == '**': + error = 'operator' + else: + # Has a trailer + trailer = node.children[-1] + assert trailer.type == 'trailer' + if trailer.children[0] == '(': + error = 'function call' + elif is_namedexpr and trailer.children[0] == '[': + error = 'subscript' + elif is_namedexpr and trailer.children[0] == '.': + error = 'attribute' + elif type_ == "fstring": + if self._normalizer.version < (3, 8): + error = 'literal' + else: + error = "f-string expression" + elif type_ in ('testlist_star_expr', 'exprlist', 'testlist'): + for child in node.children[::2]: + self._check_assignment(child, is_deletion, is_namedexpr, is_aug_assign) + elif ('expr' in type_ and type_ != 'star_expr' # is a substring + or '_test' in type_ + or type_ in ('term', 'factor')): + error = 'operator' + elif type_ == "star_expr": + if is_deletion: + if self._normalizer.version >= (3, 9): + error = "starred" + else: + self.add_issue(node, message="can't use starred expression here") + else: + if self._normalizer.version >= (3, 9): + ancestor = node.parent + else: + ancestor = _skip_parens_bottom_up(node) + if ancestor.type not in _STAR_EXPR_PARENTS and not is_aug_assign \ + and not (ancestor.type == 'atom' and ancestor.children[0] == '['): + message = "starred assignment target must be in a list or tuple" + self.add_issue(node, message=message) + + self._check_assignment(node.children[1]) + + if error is not None: + if is_namedexpr: + message = 'cannot use assignment expressions with %s' % error + else: + cannot = "can't" if self._normalizer.version < (3, 8) else "cannot" + message = ' '.join([cannot, "delete" if is_deletion else "assign to", error]) + 
self.add_issue(node, message=message) + + +@ErrorFinder.register_rule(type='sync_comp_for') +class _CompForRule(_CheckAssignmentRule): + message = "asynchronous comprehension outside of an asynchronous function" + + def is_issue(self, node): + expr_list = node.children[1] + if expr_list.type != 'expr_list': # Already handled. + self._check_assignment(expr_list) + + return node.parent.children[0] == 'async' \ + and not self._normalizer.context.is_async_funcdef() + + +@ErrorFinder.register_rule(type='expr_stmt') +class _ExprStmtRule(_CheckAssignmentRule): + message = "illegal expression for augmented assignment" + extended_message = "'{target}' is an " + message + + def is_issue(self, node): + augassign = node.children[1] + is_aug_assign = augassign != '=' and augassign.type != 'annassign' + + if self._normalizer.version <= (3, 8) or not is_aug_assign: + for before_equal in node.children[:-2:2]: + self._check_assignment(before_equal, is_aug_assign=is_aug_assign) + + if is_aug_assign: + target = _remove_parens(node.children[0]) + # a, a[b], a.b + + if target.type == "name" or ( + target.type in ("atom_expr", "power") + and target.children[1].type == "trailer" + and target.children[-1].children[0] != "(" + ): + return False + + if self._normalizer.version <= (3, 8): + return True + else: + self.add_issue( + node, + message=self.extended_message.format( + target=_get_rhs_name(node.children[0], self._normalizer.version) + ), + ) + + +@ErrorFinder.register_rule(type='with_item') +class _WithItemRule(_CheckAssignmentRule): + def is_issue(self, with_item): + self._check_assignment(with_item.children[2]) + + +@ErrorFinder.register_rule(type='del_stmt') +class _DelStmtRule(_CheckAssignmentRule): + def is_issue(self, del_stmt): + child = del_stmt.children[1] + + if child.type != 'expr_list': # Already handled. + self._check_assignment(child, is_deletion=True) + + +@ErrorFinder.register_rule(type='expr_list') +class _ExprListRule(_CheckAssignmentRule): + def is_issue(self, expr_list): + for expr in expr_list.children[::2]: + self._check_assignment(expr) + + +@ErrorFinder.register_rule(type='for_stmt') +class _ForStmtRule(_CheckAssignmentRule): + def is_issue(self, for_stmt): + # Some of the nodes here are already used, so no else if + expr_list = for_stmt.children[1] + if expr_list.type != 'expr_list': # Already handled. 
+ self._check_assignment(expr_list) + + +@ErrorFinder.register_rule(type='namedexpr_test') +class _NamedExprRule(_CheckAssignmentRule): + # namedexpr_test: test [':=' test] + + def is_issue(self, namedexpr_test): + # assigned name + first = namedexpr_test.children[0] + + def search_namedexpr_in_comp_for(node): + while True: + parent = node.parent + if parent is None: + return parent + if parent.type == 'sync_comp_for' and parent.children[3] == node: + return parent + node = parent + + if search_namedexpr_in_comp_for(namedexpr_test): + # [i+1 for i in (i := range(5))] + # [i+1 for i in (j := range(5))] + # [i+1 for i in (lambda: (j := range(5)))()] + message = 'assignment expression cannot be used in a comprehension iterable expression' + self.add_issue(namedexpr_test, message=message) + + # defined names + exprlist = list() + + def process_comp_for(comp_for): + if comp_for.type == 'sync_comp_for': + comp = comp_for + elif comp_for.type == 'comp_for': + comp = comp_for.children[1] + exprlist.extend(_get_for_stmt_definition_exprs(comp)) + + def search_all_comp_ancestors(node): + has_ancestors = False + while True: + node = node.search_ancestor('testlist_comp', 'dictorsetmaker') + if node is None: + break + for child in node.children: + if child.type in _COMP_FOR_TYPES: + process_comp_for(child) + has_ancestors = True + break + return has_ancestors + + # check assignment expressions in comprehensions + search_all = search_all_comp_ancestors(namedexpr_test) + if search_all: + if self._normalizer.context.node.type == 'classdef': + message = 'assignment expression within a comprehension ' \ + 'cannot be used in a class body' + self.add_issue(namedexpr_test, message=message) + + namelist = [expr.value for expr in exprlist if expr.type == 'name'] + if first.type == 'name' and first.value in namelist: + # [i := 0 for i, j in range(5)] + # [[(i := i) for j in range(5)] for i in range(5)] + # [i for i, j in range(5) if True or (i := 1)] + # [False and (i := 0) for i, j in range(5)] + message = 'assignment expression cannot rebind ' \ + 'comprehension iteration variable %r' % first.value + self.add_issue(namedexpr_test, message=message) + + self._check_assignment(first, is_namedexpr=True) diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar310.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar310.txt new file mode 100644 index 000000000..f092050d8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar310.txt @@ -0,0 +1,169 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
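For reference, the assignment-target machinery that closes errors.py above (`_CheckAssignmentRule`, `_NamedExprRule`) is easiest to follow from its output. A minimal sketch, assuming this vendored parso is importable; the snippet strings and the expected messages are illustrative, not part of the patch:

```python
import parso

# Illustrative sketch: exercising the assignment-target rules from errors.py above.
grammar = parso.load_grammar(version="3.8")
snippets = [
    "x + 1 = 2\n",                   # _check_assignment: operator as target
    "[i := 0 for i in range(5)]\n",  # _NamedExprRule: walrus rebinds the loop variable
]
for code in snippets:
    for issue in grammar.iter_errors(grammar.parse(code)):
        print(repr(code.strip()), "->", issue.message)
```

Given the rule code above, one would expect messages along the lines of "SyntaxError: cannot assign to operator" and "SyntaxError: assignment expression cannot rebind comprehension iteration variable 'i'".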
+single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' namedexpr_test NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: ( + (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] ( + ',' tfpdef ['=' test])* ([',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]]) + | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]]) + | '**' tfpdef [',']]] ) +| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +) +tfpdef: NAME [':' test] +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +lambdef: 'lambda' [varargslist] ':' test +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test [':=' test] | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test [':=' test] | star_expr) + (comp_for | (',' (test [':=' test] | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. 
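In this vendored copy, the blocking that the comment above attributes to ast.c is done by the `_ArglistRule` checker in errors.py earlier in this diff. A minimal sketch of how those rejections surface, assuming parso is importable (snippets illustrative):

```python
import parso

# Illustrative: the arglist production accepts these; _ArglistRule rejects them.
grammar = parso.load_grammar(version="3.9")
for code in (
    "f(x=1, x=2)\n",     # keyword argument repeated
    "f(**kw, *args)\n",  # iterable unpacking follows keyword unpacking
    "f(x=2, y)\n",       # positional argument follows keyword argument
):
    for issue in grammar.iter_errors(grammar.parse(code)):
        print(repr(code.strip()), "->", issue.message)
```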
+argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' or_test [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar311.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar311.txt new file mode 100644 index 000000000..f092050d8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar311.txt @@ -0,0 +1,169 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' namedexpr_test NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: ( + (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] ( + ',' tfpdef ['=' test])* ([',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]]) + | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]]) + | '**' tfpdef [',']]] ) +| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +) +tfpdef: NAME [':' test] +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For 
normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +lambdef: 'lambda' [varargslist] ':' test +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' 
NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test [':=' test] | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test [':=' test] | star_expr) + (comp_for | (',' (test [':=' test] | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. +argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' or_test [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar312.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar312.txt new file mode 100644 index 000000000..f092050d8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar312.txt @@ -0,0 +1,169 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
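Note that grammar310.txt, grammar311.txt and grammar312.txt are all added with the same blob (index f092050d8), i.e. byte-identical content; version-specific behaviour comes from the error-finder checks keyed on `self._normalizer.version`, not from the grammar text. A rough sketch of how the same construct fares under different version grammars, assuming parso is importable; exact issue counts depend on parso's error recovery:

```python
import parso

# Illustrative: ':=' exists in the 3.8+ grammars but not in grammar36.txt
# (added later in this diff), so the 3.6 grammar should flag it.
code = "print(n := 10)\n"
for version in ("3.6", "3.8", "3.12"):
    grammar = parso.load_grammar(version=version)
    issues = list(grammar.iter_errors(grammar.parse(code)))
    print(version, "->", len(issues), "issue(s)")
```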
+single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' namedexpr_test NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: ( + (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] ( + ',' tfpdef ['=' test])* ([',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]]) + | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]]) + | '**' tfpdef [',']]] ) +| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +) +tfpdef: NAME [':' test] +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +lambdef: 'lambda' [varargslist] ':' test +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test [':=' test] | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test [':=' test] | star_expr) + (comp_for | (',' (test [':=' test] | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. 
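The `star_expr` and `dictorsetmaker` productions above deliberately accept forms that the error finder later rejects. A minimal sketch, assuming parso is importable (snippets and expected wording illustrative):

```python
import parso

# Illustrative: grammar-level acceptance vs. error-finder rejection.
grammar = parso.load_grammar(version="3.9")
for code in (
    "{**{} for a in [1]}\n",  # _StarStarCheck: dict unpacking in a comprehension
    "del *x\n",               # _check_assignment: deleting a starred expression
):
    for issue in grammar.iter_errors(grammar.parse(code)):
        print(repr(code.strip()), "->", issue.message)
```

Per the rule code shown earlier, the expected messages are "SyntaxError: dict unpacking cannot be used in dict comprehension" and, on 3.9+, "SyntaxError: cannot delete starred".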
+argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' or_test [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar36.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar36.txt new file mode 100644 index 000000000..e79620668 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar36.txt @@ -0,0 +1,158 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://docs.python.org/devguide/grammar.html + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +# NOTE: Francisco Souza/Reinoud Elhorst, using ASYNC/'await' keywords instead of +# skipping python3.5+ compatibility, in favour of 3.7 solution +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +tfpdef: NAME [':' test] +varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' test] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' 
| '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] +while_stmt: 'while' test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +test: or_test ['if' or_test 'else' test] | lambdef +test_nocond: or_test | lambdef_nocond +lambdef: 'lambda' [varargslist] ':' test +lambdef_nocond: 'lambda' [varargslist] ':' test_nocond +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test | star_expr) + (comp_for | (',' (test | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. 
+# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. +argument: ( test [comp_for] | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' test_nocond [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar37.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar37.txt new file mode 100644 index 000000000..f4a929fe9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar37.txt @@ -0,0 +1,156 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://docs.python.org/devguide/grammar.html + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +tfpdef: NAME [':' test] +varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' test] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the 
('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] +while_stmt: 'while' test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +test: or_test ['if' or_test 'else' test] | lambdef +test_nocond: or_test | lambdef_nocond +lambdef: 'lambda' [varargslist] ':' test +lambdef_nocond: 'lambda' [varargslist] ':' test_nocond +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test | star_expr) + (comp_for | (',' (test | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. 
+# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. +argument: ( test [comp_for] | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' test_nocond [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar38.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar38.txt new file mode 100644 index 000000000..7288d556f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar38.txt @@ -0,0 +1,171 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: ( + (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] ( + ',' tfpdef ['=' test])* ([',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]]) + | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]]) + | '**' tfpdef [',']]] ) +| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +) +tfpdef: NAME [':' test] +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] 
+testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +test_nocond: or_test | lambdef_nocond +lambdef: 'lambda' [varargslist] ':' test +lambdef_nocond: 'lambda' [varargslist] ':' test_nocond +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' 
NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test | star_expr) + (comp_for | (',' (test | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. +argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' test_nocond [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/grammar39.txt b/myenv/lib/python3.10/site-packages/parso/python/grammar39.txt new file mode 100644 index 000000000..ae46033cf --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/grammar39.txt @@ -0,0 +1,169 @@ +# Grammar for Python + +# NOTE WELL: You should also follow all the steps listed at +# https://devguide.python.org/grammar/ + +# Start symbols for the grammar: +# single_input is a single interactive statement; +# file_input is a module or sequence of commands read from an input file; +# eval_input is the input for the eval() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
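The three start symbols determine how the parser is driven: parso's public API parses whole modules with `file_input`, while `single_input` and `eval_input` serve other entry points. A minimal sketch (assuming parso is importable):

```python
# Minimal sketch, assuming parso is importable. grammar.parse() uses the
# file_input start symbol (stmt* ENDMARKER).
import parso

grammar = parso.load_grammar(version="3.9")
module = grammar.parse("x = 1\n")
print(module.type)               # 'file_input'
print(module.children[-1].type)  # 'endmarker'
```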
+single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +file_input: stmt* ENDMARKER +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' namedexpr_test NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef | async_funcdef) + +async_funcdef: 'async' funcdef +funcdef: 'def' NAME parameters ['->' test] ':' suite + +parameters: '(' [typedargslist] ')' +typedargslist: ( + (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] ( + ',' tfpdef ['=' test])* ([',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]]) + | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]]) + | '**' tfpdef [',']]] ) +| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [ + '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']]] + | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]] + | '**' tfpdef [',']) +) +tfpdef: NAME [':' test] +varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [ + '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [',']]] + | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] + | '**' vfpdef [','] +) +vfpdef: NAME + +stmt: simple_stmt | compound_stmt | NEWLINE +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | nonlocal_stmt | assert_stmt) +expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +annassign: ':' test ['=' (yield_expr|testlist_star_expr)] +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal and annotated assignments, additional restrictions enforced by the interpreter +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist_star_expr] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS +import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: 'global' NAME (',' NAME)* +nonlocal_stmt: 'nonlocal' NAME (',' NAME)* +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: 'async' (funcdef | with_stmt | for_stmt) +if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite] +while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test ['as' NAME]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +namedexpr_test: test [':=' test] +test: or_test ['if' or_test 'else' test] | lambdef +lambdef: 'lambda' [varargslist] ':' test +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +# <> isn't actually a valid comparison operator in Python. It's here for the +# sake of a __future__ import described in PEP 401 (which really works :-) +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'@'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom_expr ['**' factor] +atom_expr: ['await'] atom trailer* +atom: ('(' [yield_expr|testlist_comp] ')' | + '[' [testlist_comp] ']' | + '{' [dictorsetmaker] '}' | + NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False') +testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] ) +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictorsetmaker: ( ((test ':' test | '**' expr) + (comp_for | (',' (test ':' test | '**' expr))* [','])) | + ((test [':=' test] | star_expr) + (comp_for | (',' (test [':=' test] | star_expr))* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: argument (',' argument)* [','] + +# The reason that keywords are test nodes instead of NAME is that using NAME +# results in an ambiguity. ast.c makes sure it's a NAME. +# "test '=' test" is really "keyword '=' test", but we have no such token. +# These need to be in a single rule to avoid grammar that is ambiguous +# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr, +# we explicitly match '*' here, too, to give it proper precedence. +# Illegal combinations and orderings are blocked in ast.c: +# multiple (test comp_for) arguments are blocked; keyword unpackings +# that precede iterable unpackings are blocked; etc. 
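To see these restrictions in practice, a call mixing positional, keyword, `*` and `**` arguments exercises the `argument` rule that follows these notes; a minimal sketch (assuming parso is importable; the index path assumes parso's current tree shape for a call expression):

```python
# Minimal sketch, assuming parso is importable; the child indices assume
# parso's current tree shape and may vary between versions.
import parso

module = parso.parse("f(a, b=2, *rest, **kw)\n")
atom_expr = module.children[0].children[0]   # simple_stmt -> atom_expr
arglist = atom_expr.children[1].children[1]  # trailer: '(' arglist ')'
print([c.type for c in arglist.children])
# ['name', 'operator', 'argument', 'operator', 'argument', 'operator', 'argument']
```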
+argument: ( test [comp_for] | + test ':=' test | + test '=' test | + '**' test | + '*' test ) + +comp_iter: comp_for | comp_if +sync_comp_for: 'for' exprlist 'in' or_test [comp_iter] +comp_for: ['async'] sync_comp_for +comp_if: 'if' or_test [comp_iter] + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [yield_arg] +yield_arg: 'from' test | testlist_star_expr + +strings: (STRING | fstring)+ +fstring: FSTRING_START fstring_content* FSTRING_END +fstring_content: FSTRING_STRING | fstring_expr +fstring_conversion: '!' NAME +fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}' +fstring_format_spec: ':' fstring_content* diff --git a/myenv/lib/python3.10/site-packages/parso/python/parser.py b/myenv/lib/python3.10/site-packages/parso/python/parser.py new file mode 100644 index 000000000..fa45e8b15 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/parser.py @@ -0,0 +1,206 @@ +from parso.python import tree +from parso.python.token import PythonTokenTypes +from parso.parser import BaseParser + + +NAME = PythonTokenTypes.NAME +INDENT = PythonTokenTypes.INDENT +DEDENT = PythonTokenTypes.DEDENT + + +class Parser(BaseParser): + """ + This class is used to parse a Python file; it then divides it into a + class structure of different scopes. + + :param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar. + """ + + node_map = { + 'expr_stmt': tree.ExprStmt, + 'classdef': tree.Class, + 'funcdef': tree.Function, + 'file_input': tree.Module, + 'import_name': tree.ImportName, + 'import_from': tree.ImportFrom, + 'break_stmt': tree.KeywordStatement, + 'continue_stmt': tree.KeywordStatement, + 'return_stmt': tree.ReturnStmt, + 'raise_stmt': tree.KeywordStatement, + 'yield_expr': tree.YieldExpr, + 'del_stmt': tree.KeywordStatement, + 'pass_stmt': tree.KeywordStatement, + 'global_stmt': tree.GlobalStmt, + 'nonlocal_stmt': tree.KeywordStatement, + 'print_stmt': tree.KeywordStatement, + 'assert_stmt': tree.AssertStmt, + 'if_stmt': tree.IfStmt, + 'with_stmt': tree.WithStmt, + 'for_stmt': tree.ForStmt, + 'while_stmt': tree.WhileStmt, + 'try_stmt': tree.TryStmt, + 'sync_comp_for': tree.SyncCompFor, + # Not sure if this is the best idea, but IMO it's the easiest way to + # avoid extreme amounts of work around the subtle difference of 2/3 + # grammar in list comprehensions.
+ 'decorator': tree.Decorator, + 'lambdef': tree.Lambda, + 'lambdef_nocond': tree.Lambda, + 'namedexpr_test': tree.NamedExpr, + } + default_node = tree.PythonNode + + # Names/Keywords are handled separately + _leaf_map = { + PythonTokenTypes.STRING: tree.String, + PythonTokenTypes.NUMBER: tree.Number, + PythonTokenTypes.NEWLINE: tree.Newline, + PythonTokenTypes.ENDMARKER: tree.EndMarker, + PythonTokenTypes.FSTRING_STRING: tree.FStringString, + PythonTokenTypes.FSTRING_START: tree.FStringStart, + PythonTokenTypes.FSTRING_END: tree.FStringEnd, + } + + def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'): + super().__init__(pgen_grammar, start_nonterminal, + error_recovery=error_recovery) + + self.syntax_errors = [] + self._omit_dedent_list = [] + self._indent_counter = 0 + + def parse(self, tokens): + if self._error_recovery: + if self._start_nonterminal != 'file_input': + raise NotImplementedError + + tokens = self._recovery_tokenize(tokens) + + return super().parse(tokens) + + def convert_node(self, nonterminal, children): + """ + Convert raw node information to a PythonBaseNode instance. + + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is built + strictly bottom-up. + """ + try: + node = self.node_map[nonterminal](children) + except KeyError: + if nonterminal == 'suite': + # We don't want the INDENT/DEDENT in our parser tree. Those + # leaves are just cancer. They are virtual leaves and not real + # ones and therefore have pseudo start/end positions and no + # prefixes. Just ignore them. + children = [children[0]] + children[2:-1] + node = self.default_node(nonterminal, children) + return node + + def convert_leaf(self, type, value, prefix, start_pos): + # print('leaf', repr(value), token.tok_name[type]) + if type == NAME: + if value in self._pgen_grammar.reserved_syntax_strings: + return tree.Keyword(value, start_pos, prefix) + else: + return tree.Name(value, start_pos, prefix) + + return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix) + + def error_recovery(self, token): + tos_nodes = self.stack[-1].nodes + if tos_nodes: + last_leaf = tos_nodes[-1].get_last_leaf() + else: + last_leaf = None + + if self._start_nonterminal == 'file_input' and \ + (token.type == PythonTokenTypes.ENDMARKER + or token.type == DEDENT and not last_leaf.value.endswith('\n') + and not last_leaf.value.endswith('\r')): + # In Python, statements need to end with a newline. But since it's + # possible (and valid in Python) that there's no newline at the + # end of a file, we have to recover even if the user doesn't want + # error recovery. + if self.stack[-1].dfa.from_rule == 'simple_stmt': + try: + plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE] + except KeyError: + pass + else: + if plan.next_dfa.is_final and not plan.dfa_pushes: + # We are ignoring here that the newline would be + # required for a simple_stmt. + self.stack[-1].dfa = plan.next_dfa + self._add_token(token) + return + + if not self._error_recovery: + return super().error_recovery(token) + + def current_suite(stack): + # For now just discard everything that is not a suite or + # file_input, if we detect an error. + for until_index, stack_node in reversed(list(enumerate(stack))): + # `suite` can sometimes be only simple_stmt, not stmt.
+ if stack_node.nonterminal == 'file_input': + break + elif stack_node.nonterminal == 'suite': + # In the case where we just have a newline we don't want to + # do error recovery here. In all other cases, we want to do + # error recovery. + if len(stack_node.nodes) != 1: + break + return until_index + + until_index = current_suite(self.stack) + + if self._stack_removal(until_index + 1): + self._add_token(token) + else: + typ, value, start_pos, prefix = token + if typ == INDENT: + # For every deleted INDENT we have to delete a DEDENT as well. + # Otherwise the parser will get into trouble and DEDENT too early. + self._omit_dedent_list.append(self._indent_counter) + + error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix) + self.stack[-1].nodes.append(error_leaf) + + tos = self.stack[-1] + if tos.nonterminal == 'suite': + # Need at least one statement in the suite. This happened with the + # error recovery above. + try: + tos.dfa = tos.dfa.arcs['stmt'] + except KeyError: + # We're already in a final state. + pass + + def _stack_removal(self, start_index): + all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes] + + if all_nodes: + node = tree.PythonErrorNode(all_nodes) + self.stack[start_index - 1].nodes.append(node) + + self.stack[start_index:] = [] + return bool(all_nodes) + + def _recovery_tokenize(self, tokens): + for token in tokens: + typ = token[0] + if typ == DEDENT: + # We need to count indents, because if we just omit any DEDENT, + # we might omit them in the wrong place. + o = self._omit_dedent_list + if o and o[-1] == self._indent_counter: + o.pop() + self._indent_counter -= 1 + continue + + self._indent_counter -= 1 + elif typ == INDENT: + self._indent_counter += 1 + yield token diff --git a/myenv/lib/python3.10/site-packages/parso/python/pep8.py b/myenv/lib/python3.10/site-packages/parso/python/pep8.py new file mode 100644 index 000000000..c492dae68 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/pep8.py @@ -0,0 +1,767 @@ +import re +from contextlib import contextmanager +from typing import Tuple + +from parso.python.errors import ErrorFinder, ErrorFinderConfig +from parso.normalizer import Rule +from parso.python.tree import Flow, Scope + + +_IMPORT_TYPES = ('import_name', 'import_from') +_SUITE_INTRODUCERS = ('classdef', 'funcdef', 'if_stmt', 'while_stmt', + 'for_stmt', 'try_stmt', 'with_stmt') +_NON_STAR_TYPES = ('term', 'import_from', 'power') +_OPENING_BRACKETS = '(', '[', '{' +_CLOSING_BRACKETS = ')', ']', '}' +_FACTOR = '+', '-', '~' +_ALLOW_SPACE = '*', '+', '-', '**', '/', '//', '@' +_BITWISE_OPERATOR = '<<', '>>', '|', '&', '^' +_NEEDS_SPACE: Tuple[str, ...]
= ( + '=', '%', '->', + '<', '>', '==', '>=', '<=', '<>', '!=', + '+=', '-=', '*=', '@=', '/=', '%=', '&=', '|=', '^=', '<<=', + '>>=', '**=', '//=') +_NEEDS_SPACE += _BITWISE_OPERATOR +_IMPLICIT_INDENTATION_TYPES = ('dictorsetmaker', 'argument') +_POSSIBLE_SLICE_PARENTS = ('subscript', 'subscriptlist', 'sliceop') + + +class IndentationTypes: + VERTICAL_BRACKET = object() + HANGING_BRACKET = object() + BACKSLASH = object() + SUITE = object() + IMPLICIT = object() + + +class IndentationNode(object): + type = IndentationTypes.SUITE + + def __init__(self, config, indentation, parent=None): + self.bracket_indentation = self.indentation = indentation + self.parent = parent + + def __repr__(self): + return '<%s>' % self.__class__.__name__ + + def get_latest_suite_node(self): + n = self + while n is not None: + if n.type == IndentationTypes.SUITE: + return n + + n = n.parent + + +class BracketNode(IndentationNode): + def __init__(self, config, leaf, parent, in_suite_introducer=False): + self.leaf = leaf + + # Figure out here what the indentation is. For chained brackets + # we can basically use the previous indentation. + previous_leaf = leaf + n = parent + if n.type == IndentationTypes.IMPLICIT: + n = n.parent + while True: + if hasattr(n, 'leaf') and previous_leaf.line != n.leaf.line: + break + + previous_leaf = previous_leaf.get_previous_leaf() + if not isinstance(n, BracketNode) or previous_leaf != n.leaf: + break + n = n.parent + parent_indentation = n.indentation + + next_leaf = leaf.get_next_leaf() + if '\n' in next_leaf.prefix or '\r' in next_leaf.prefix: + # This implies code like: + # foobarbaz( + # a, + # b, + # ) + self.bracket_indentation = parent_indentation \ + + config.closing_bracket_hanging_indentation + self.indentation = parent_indentation + config.indentation + self.type = IndentationTypes.HANGING_BRACKET + else: + # Implies code like: + # foobarbaz( + # a, + # b, + # ) + expected_end_indent = leaf.end_pos[1] + if '\t' in config.indentation: + self.indentation = None + else: + self.indentation = ' ' * expected_end_indent + self.bracket_indentation = self.indentation + self.type = IndentationTypes.VERTICAL_BRACKET + + if in_suite_introducer and parent.type == IndentationTypes.SUITE \ + and self.indentation == parent_indentation + config.indentation: + self.indentation += config.indentation + # The closing bracket should have the same indentation. + self.bracket_indentation = self.indentation + self.parent = parent + + +class ImplicitNode(BracketNode): + """ + Implicit indentation after keyword arguments, default arguments, + annotations and dict values. + """ + def __init__(self, config, leaf, parent): + super().__init__(config, leaf, parent) + self.type = IndentationTypes.IMPLICIT + + next_leaf = leaf.get_next_leaf() + if leaf == ':' and '\n' not in next_leaf.prefix and '\r' not in next_leaf.prefix: + self.indentation += ' ' + + +class BackslashNode(IndentationNode): + type = IndentationTypes.BACKSLASH + + def __init__(self, config, parent_indentation, containing_leaf, spacing, parent=None): + expr_stmt = containing_leaf.search_ancestor('expr_stmt') + if expr_stmt is not None: + equals = expr_stmt.children[-2] + + if '\t' in config.indentation: + # TODO unite with the code of BracketNode + self.indentation = None + else: + # If the backslash follows the equals, use normal indentation + # otherwise it should align with the equals. + if equals.end_pos == spacing.start_pos: + self.indentation = parent_indentation + config.indentation + else: + # +1 because there is a space. 
+ self.indentation = ' ' * (equals.end_pos[1] + 1) + else: + self.indentation = parent_indentation + config.indentation + self.bracket_indentation = self.indentation + self.parent = parent + + +def _is_magic_name(name): + return name.value.startswith('__') and name.value.endswith('__') + + +class PEP8Normalizer(ErrorFinder): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._previous_part = None + self._previous_leaf = None + self._on_newline = True + self._newline_count = 0 + self._wanted_newline_count = None + self._max_new_lines_in_prefix = 0 + self._new_statement = True + self._implicit_indentation_possible = False + # The top of stack of the indentation nodes. + self._indentation_tos = self._last_indentation_tos = \ + IndentationNode(self._config, indentation='') + self._in_suite_introducer = False + + if ' ' in self._config.indentation: + self._indentation_type = 'spaces' + self._wrong_indentation_char = '\t' + else: + self._indentation_type = 'tabs' + self._wrong_indentation_char = ' ' + + @contextmanager + def visit_node(self, node): + with super().visit_node(node): + with self._visit_node(node): + yield + + @contextmanager + def _visit_node(self, node): + typ = node.type + + if typ == 'import_name': + names = node.get_defined_names() + if len(names) > 1: + for name in names[:1]: + self.add_issue(name, 401, 'Multiple imports on one line') + elif typ == 'lambdef': + expr_stmt = node.parent + # Check if it's simply defining a single name, not something like + # foo.bar or x[1], where using a lambda could make more sense. + if expr_stmt.type == 'expr_stmt' and any(n.type == 'name' + for n in expr_stmt.children[:-2:2]): + self.add_issue(node, 731, 'Do not assign a lambda expression, use a def') + elif typ == 'try_stmt': + for child in node.children: + # Here we can simply check if it's an except, because otherwise + # it would be an except_clause. + if child.type == 'keyword' and child.value == 'except': + self.add_issue(child, 722, 'Do not use bare except, specify exception instead') + elif typ == 'comparison': + for child in node.children: + if child.type not in ('atom_expr', 'power'): + continue + if len(child.children) > 2: + continue + trailer = child.children[1] + atom = child.children[0] + if trailer.type == 'trailer' and atom.type == 'name' \ + and atom.value == 'type': + self.add_issue(node, 721, "Do not compare types, use 'isinstance()'") + break + elif typ == 'file_input': + endmarker = node.children[-1] + prev = endmarker.get_previous_leaf() + prefix = endmarker.prefix + if (not prefix.endswith('\n') and not prefix.endswith('\r') and ( + prefix or prev is None or prev.value not in {'\n', '\r\n', '\r'})): + self.add_issue(endmarker, 292, "No newline at end of file") + + if typ in _IMPORT_TYPES: + simple_stmt = node.parent + module = simple_stmt.parent + if module.type == 'file_input': + index = module.children.index(simple_stmt) + for child in module.children[:index]: + children = [child] + if child.type == 'simple_stmt': + # Remove the newline.
+ children = child.children[:-1] + + found_docstring = False + for c in children: + if c.type == 'string' and not found_docstring: + continue + found_docstring = True + + if c.type == 'expr_stmt' and \ + all(_is_magic_name(n) for n in c.get_defined_names()): + continue + + if c.type in _IMPORT_TYPES or isinstance(c, Flow): + continue + + self.add_issue(node, 402, 'Module level import not at top of file') + break + else: + continue + break + + implicit_indentation_possible = typ in _IMPLICIT_INDENTATION_TYPES + in_introducer = typ in _SUITE_INTRODUCERS + if in_introducer: + self._in_suite_introducer = True + elif typ == 'suite': + if self._indentation_tos.type == IndentationTypes.BACKSLASH: + self._indentation_tos = self._indentation_tos.parent + + self._indentation_tos = IndentationNode( + self._config, + self._indentation_tos.indentation + self._config.indentation, + parent=self._indentation_tos + ) + elif implicit_indentation_possible: + self._implicit_indentation_possible = True + yield + if typ == 'suite': + assert self._indentation_tos.type == IndentationTypes.SUITE + self._indentation_tos = self._indentation_tos.parent + # If we dedent, no lines are needed anymore. + self._wanted_newline_count = None + elif implicit_indentation_possible: + self._implicit_indentation_possible = False + if self._indentation_tos.type == IndentationTypes.IMPLICIT: + self._indentation_tos = self._indentation_tos.parent + elif in_introducer: + self._in_suite_introducer = False + if typ in ('classdef', 'funcdef'): + self._wanted_newline_count = self._get_wanted_blank_lines_count() + + def _check_tabs_spaces(self, spacing): + if self._wrong_indentation_char in spacing.value: + self.add_issue(spacing, 101, 'Indentation contains ' + self._indentation_type) + return True + return False + + def _get_wanted_blank_lines_count(self): + suite_node = self._indentation_tos.get_latest_suite_node() + return int(suite_node.parent is None) + 1 + + def _reset_newlines(self, spacing, leaf, is_comment=False): + self._max_new_lines_in_prefix = \ + max(self._max_new_lines_in_prefix, self._newline_count) + + wanted = self._wanted_newline_count + if wanted is not None: + # Need to subtract one + blank_lines = self._newline_count - 1 + if wanted > blank_lines and leaf.type != 'endmarker': + # In case of a comment we don't need to add the issue, yet. + if not is_comment: + # TODO end_pos wrong. + code = 302 if wanted == 2 else 301 + message = "expected %s blank line, found %s" \ + % (wanted, blank_lines) + self.add_issue(spacing, code, message) + self._wanted_newline_count = None + else: + self._wanted_newline_count = None + + if not is_comment: + wanted = self._get_wanted_blank_lines_count() + actual = self._max_new_lines_in_prefix - 1 + + val = leaf.value + needs_lines = ( + val == '@' and leaf.parent.type == 'decorator' + or ( + val == 'class' + or val == 'async' and leaf.get_next_leaf() == 'def' + or val == 'def' and self._previous_leaf != 'async' + ) and leaf.parent.parent.type != 'decorated' + ) + if needs_lines and actual < wanted: + func_or_cls = leaf.parent + suite = func_or_cls.parent + if suite.type == 'decorated': + suite = suite.parent + + # The first leaf of a file or a suite should not need blank + # lines.
+ if suite.children[int(suite.type == 'suite')] != func_or_cls: + code = 302 if wanted == 2 else 301 + message = "expected %s blank line, found %s" \ + % (wanted, actual) + self.add_issue(spacing, code, message) + + self._max_new_lines_in_prefix = 0 + + self._newline_count = 0 + + def visit_leaf(self, leaf): + super().visit_leaf(leaf) + for part in leaf._split_prefix(): + if part.type == 'spacing': + # This part is used for the part call after for. + break + self._visit_part(part, part.create_spacing_part(), leaf) + + self._analyse_non_prefix(leaf) + self._visit_part(leaf, part, leaf) + + # Cleanup + self._last_indentation_tos = self._indentation_tos + + self._new_statement = leaf.type == 'newline' + + # TODO does this work? with brackets and stuff? + if leaf.type == 'newline' and \ + self._indentation_tos.type == IndentationTypes.BACKSLASH: + self._indentation_tos = self._indentation_tos.parent + + if leaf.value == ':' and leaf.parent.type in _SUITE_INTRODUCERS: + self._in_suite_introducer = False + elif leaf.value == 'elif': + self._in_suite_introducer = True + + if not self._new_statement: + self._reset_newlines(part, leaf) + self._max_blank_lines = 0 + + self._previous_leaf = leaf + + return leaf.value + + def _visit_part(self, part, spacing, leaf): + value = part.value + type_ = part.type + if type_ == 'error_leaf': + return + + if value == ',' and part.parent.type == 'dictorsetmaker': + self._indentation_tos = self._indentation_tos.parent + + node = self._indentation_tos + + if type_ == 'comment': + if value.startswith('##'): + # Whole blocks of # should not raise an error. + if value.lstrip('#'): + self.add_issue(part, 266, "Too many leading '#' for block comment.") + elif self._on_newline: + if not re.match(r'#:? ', value) and not value == '#' \ + and not (value.startswith('#!') and part.start_pos == (1, 0)): + self.add_issue(part, 265, "Block comment should start with '# '") + else: + if not re.match(r'#:? [^ ]', value): + self.add_issue(part, 262, "Inline comment should start with '# '") + + self._reset_newlines(spacing, leaf, is_comment=True) + elif type_ == 'newline': + if self._newline_count > self._get_wanted_blank_lines_count(): + self.add_issue(part, 303, "Too many blank lines (%s)" % self._newline_count) + elif leaf in ('def', 'class') \ + and leaf.parent.parent.type == 'decorated': + self.add_issue(part, 304, "Blank lines found after function decorator") + + self._newline_count += 1 + + if type_ == 'backslash': + # TODO is this enough checking? What about ==? + if node.type != IndentationTypes.BACKSLASH: + if node.type != IndentationTypes.SUITE: + self.add_issue(part, 502, 'The backslash is redundant between brackets') + else: + indentation = node.indentation + if self._in_suite_introducer and node.type == IndentationTypes.SUITE: + indentation += self._config.indentation + + self._indentation_tos = BackslashNode( + self._config, + indentation, + part, + spacing, + parent=self._indentation_tos + ) + elif self._on_newline: + indentation = spacing.value + if node.type == IndentationTypes.BACKSLASH \ + and self._previous_part.type == 'newline': + self._indentation_tos = self._indentation_tos.parent + + if not self._check_tabs_spaces(spacing): + should_be_indentation = node.indentation + if type_ == 'comment': + # Comments can be dedented. So we have to care for that. 
+ n = self._last_indentation_tos + while True: + if len(indentation) > len(n.indentation): + break + + should_be_indentation = n.indentation + + self._last_indentation_tos = n + if n == node: + break + n = n.parent + + if self._new_statement: + if type_ == 'newline': + if indentation: + self.add_issue(spacing, 291, 'Trailing whitespace') + elif indentation != should_be_indentation: + s = '%s %s' % (len(self._config.indentation), self._indentation_type) + self.add_issue(part, 111, 'Indentation is not a multiple of ' + s) + else: + if value in '])}': + should_be_indentation = node.bracket_indentation + else: + should_be_indentation = node.indentation + if self._in_suite_introducer and indentation == \ + node.get_latest_suite_node().indentation \ + + self._config.indentation: + self.add_issue(part, 129, "Line with same indent as next logical block") + elif indentation != should_be_indentation: + if not self._check_tabs_spaces(spacing) and part.value not in \ + {'\n', '\r\n', '\r'}: + if value in '])}': + if node.type == IndentationTypes.VERTICAL_BRACKET: + self.add_issue( + part, + 124, + "Closing bracket does not match visual indentation" + ) + else: + self.add_issue( + part, + 123, + "Closing bracket does not match " + "indentation of opening bracket's line" + ) + else: + if len(indentation) < len(should_be_indentation): + if node.type == IndentationTypes.VERTICAL_BRACKET: + self.add_issue( + part, + 128, + 'Continuation line under-indented for visual indent' + ) + elif node.type == IndentationTypes.BACKSLASH: + self.add_issue( + part, + 122, + 'Continuation line missing indentation or outdented' + ) + elif node.type == IndentationTypes.IMPLICIT: + self.add_issue(part, 135, 'xxx') + else: + self.add_issue( + part, + 121, + 'Continuation line under-indented for hanging indent' + ) + else: + if node.type == IndentationTypes.VERTICAL_BRACKET: + self.add_issue( + part, + 127, + 'Continuation line over-indented for visual indent' + ) + elif node.type == IndentationTypes.IMPLICIT: + self.add_issue(part, 136, 'xxx') + else: + self.add_issue( + part, + 126, + 'Continuation line over-indented for hanging indent' + ) + else: + self._check_spacing(part, spacing) + + self._check_line_length(part, spacing) + # ------------------------------- + # Finalizing. Updating the state. + # ------------------------------- + if value and value in '()[]{}' and type_ != 'error_leaf' \ + and part.parent.type != 'error_node': + if value in _OPENING_BRACKETS: + self._indentation_tos = BracketNode( + self._config, part, + parent=self._indentation_tos, + in_suite_introducer=self._in_suite_introducer + ) + else: + assert node.type != IndentationTypes.IMPLICIT + self._indentation_tos = self._indentation_tos.parent + elif value in ('=', ':') and self._implicit_indentation_possible \ + and part.parent.type in _IMPLICIT_INDENTATION_TYPES: + indentation = node.indentation + self._indentation_tos = ImplicitNode( + self._config, part, parent=self._indentation_tos + ) + + self._on_newline = type_ in ('newline', 'backslash', 'bom') + + self._previous_part = part + self._previous_spacing = spacing + + def _check_line_length(self, part, spacing): + if part.type == 'backslash': + last_column = part.start_pos[1] + 1 + else: + last_column = part.end_pos[1] + if last_column > self._config.max_characters \ + and spacing.start_pos[1] <= self._config.max_characters: + # Special case for long URLs in multi-line docstrings or comments, + # but still report the error when the first 72 chars are whitespace.
+ report = True + if part.type == 'comment': + splitted = part.value[1:].split() + if len(splitted) == 1 \ + and (part.end_pos[1] - len(splitted[0])) < 72: + report = False + if report: + self.add_issue( + part, + 501, + 'Line too long (%s > %s characters)' % + (last_column, self._config.max_characters), + ) + + def _check_spacing(self, part, spacing): + def add_if_spaces(*args): + if spaces: + return self.add_issue(*args) + + def add_not_spaces(*args): + if not spaces: + return self.add_issue(*args) + + spaces = spacing.value + prev = self._previous_part + if prev is not None and prev.type == 'error_leaf' or part.type == 'error_leaf': + return + + type_ = part.type + if '\t' in spaces: + self.add_issue(spacing, 223, 'Used tab to separate tokens') + elif type_ == 'comment': + if len(spaces) < self._config.spaces_before_comment: + self.add_issue(spacing, 261, 'At least two spaces before inline comment') + elif type_ == 'newline': + add_if_spaces(spacing, 291, 'Trailing whitespace') + elif len(spaces) > 1: + self.add_issue(spacing, 221, 'Multiple spaces used') + else: + if prev in _OPENING_BRACKETS: + message = "Whitespace after '%s'" % part.value + add_if_spaces(spacing, 201, message) + elif part in _CLOSING_BRACKETS: + message = "Whitespace before '%s'" % part.value + add_if_spaces(spacing, 202, message) + elif part in (',', ';') or part == ':' \ + and part.parent.type not in _POSSIBLE_SLICE_PARENTS: + message = "Whitespace before '%s'" % part.value + add_if_spaces(spacing, 203, message) + elif prev == ':' and prev.parent.type in _POSSIBLE_SLICE_PARENTS: + pass # TODO + elif prev in (',', ';', ':'): + add_not_spaces(spacing, 231, "missing whitespace after '%s'" % prev.value) + elif part == ':': # Is a subscript + # TODO + pass + elif part in ('*', '**') and part.parent.type not in _NON_STAR_TYPES \ + or prev in ('*', '**') \ + and prev.parent.type not in _NON_STAR_TYPES: + # TODO + pass + elif prev in _FACTOR and prev.parent.type == 'factor': + pass + elif prev == '@' and prev.parent.type == 'decorator': + pass # TODO should probably raise an error if there's a space here + elif part in _NEEDS_SPACE or prev in _NEEDS_SPACE: + if part == '=' and part.parent.type in ('argument', 'param') \ + or prev == '=' and prev.parent.type in ('argument', 'param'): + if part == '=': + param = part.parent + else: + param = prev.parent + if param.type == 'param' and param.annotation: + add_not_spaces(spacing, 252, 'Expected spaces around annotation equals') + else: + add_if_spaces( + spacing, + 251, + 'Unexpected spaces around keyword / parameter equals' + ) + elif part in _BITWISE_OPERATOR or prev in _BITWISE_OPERATOR: + add_not_spaces( + spacing, + 227, + 'Missing whitespace around bitwise or shift operator' + ) + elif part == '%' or prev == '%': + add_not_spaces(spacing, 228, 'Missing whitespace around modulo operator') + else: + message_225 = 'Missing whitespace between tokens' + add_not_spaces(spacing, 225, message_225) + elif type_ == 'keyword' or prev.type == 'keyword': + add_not_spaces(spacing, 275, 'Missing whitespace around keyword') + else: + prev_spacing = self._previous_spacing + if prev in _ALLOW_SPACE and spaces != prev_spacing.value \ + and '\n' not in self._previous_leaf.prefix \ + and '\r' not in self._previous_leaf.prefix: + message = "Whitespace before operator doesn't match with whitespace after" + self.add_issue(spacing, 229, message) + + if spaces and part not in _ALLOW_SPACE and prev not in _ALLOW_SPACE: + message_225 = 'Missing whitespace between tokens' + # self.add_issue(spacing, 225,
message_225) + # TODO why only brackets? + if part in _OPENING_BRACKETS: + message = "Whitespace before '%s'" % part.value + add_if_spaces(spacing, 211, message) + + def _analyse_non_prefix(self, leaf): + typ = leaf.type + if typ == 'name' and leaf.value in ('l', 'O', 'I'): + if leaf.is_definition(): + message = "Do not define %s named 'l', 'O', or 'I'" + if leaf.parent.type == 'classdef' and leaf.parent.name == leaf: + self.add_issue(leaf, 742, message % 'classes') + elif leaf.parent.type == 'funcdef' and leaf.parent.name == leaf: + self.add_issue(leaf, 743, message % 'functions') + else: + self.add_issue(leaf, 741, message % 'variables') + elif leaf.value == ':': + if isinstance(leaf.parent, (Flow, Scope)) and leaf.parent.type != 'lambdef': + next_leaf = leaf.get_next_leaf() + if next_leaf.type != 'newline': + if leaf.parent.type == 'funcdef': + self.add_issue(next_leaf, 704, 'Multiple statements on one line (def)') + else: + self.add_issue(next_leaf, 701, 'Multiple statements on one line (colon)') + elif leaf.value == ';': + if leaf.get_next_leaf().type in ('newline', 'endmarker'): + self.add_issue(leaf, 703, 'Statement ends with a semicolon') + else: + self.add_issue(leaf, 702, 'Multiple statements on one line (semicolon)') + elif leaf.value in ('==', '!='): + comparison = leaf.parent + index = comparison.children.index(leaf) + left = comparison.children[index - 1] + right = comparison.children[index + 1] + for node in left, right: + if node.type == 'keyword' or node.type == 'name': + if node.value == 'None': + message = "comparison to None should be 'if cond is None:'" + self.add_issue(leaf, 711, message) + break + elif node.value in ('True', 'False'): + message = "comparison to False/True should be " \ + "'if cond is True:' or 'if cond:'" + self.add_issue(leaf, 712, message) + break + elif leaf.value in ('in', 'is'): + comparison = leaf.parent + if comparison.type == 'comparison' and comparison.parent.type == 'not_test': + if leaf.value == 'in': + self.add_issue(leaf, 713, "test for membership should be 'not in'") + else: + self.add_issue(leaf, 714, "test for object identity should be 'is not'") + elif typ == 'string': + # Checking multiline strings + for i, line in enumerate(leaf.value.splitlines()[1:]): + indentation = re.match(r'[ \t]*', line).group(0) + start_pos = leaf.line + i, len(indentation) + # TODO check multiline indentation. + start_pos + elif typ == 'endmarker': + if self._newline_count >= 2: + self.add_issue(leaf, 391, 'Blank line at end of file') + + def add_issue(self, node, code, message): + if self._previous_leaf is not None: + if self._previous_leaf.search_ancestor('error_node') is not None: + return + if self._previous_leaf.type == 'error_leaf': + return + if node.search_ancestor('error_node') is not None: + return + if code in (901, 903): + # 901 and 903 are raised by the ErrorFinder. + super().add_issue(node, code, message) + else: + # Skip ErrorFinder here, because it has custom behavior. + super(ErrorFinder, self).add_issue(node, code, message) + + +class PEP8NormalizerConfig(ErrorFinderConfig): + normalizer_class = PEP8Normalizer + """ + Normalizing to PEP8. Not really implemented, yet.
+ """ + def __init__(self, indentation=' ' * 4, hanging_indentation=None, + max_characters=79, spaces_before_comment=2): + self.indentation = indentation + if hanging_indentation is None: + hanging_indentation = indentation + self.hanging_indentation = hanging_indentation + self.closing_bracket_hanging_indentation = '' + self.break_after_binary = False + self.max_characters = max_characters + self.spaces_before_comment = spaces_before_comment + + +# TODO this is not yet ready. +# @PEP8Normalizer.register_rule(type='endmarker') +class BlankLineAtEnd(Rule): + code = 392 + message = 'Blank line at end of file' + + def is_issue(self, leaf): + return self._newline_count >= 2 diff --git a/myenv/lib/python3.10/site-packages/parso/python/prefix.py b/myenv/lib/python3.10/site-packages/parso/python/prefix.py new file mode 100644 index 000000000..6b8d59e69 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/prefix.py @@ -0,0 +1,106 @@ +import re +from codecs import BOM_UTF8 +from typing import Tuple + +from parso.python.tokenize import group + +unicode_bom = BOM_UTF8.decode('utf-8') + + +class PrefixPart: + def __init__(self, leaf, typ, value, spacing='', start_pos=None): + assert start_pos is not None + self.parent = leaf + self.type = typ + self.value = value + self.spacing = spacing + self.start_pos: Tuple[int, int] = start_pos + + @property + def end_pos(self) -> Tuple[int, int]: + if self.value.endswith('\n') or self.value.endswith('\r'): + return self.start_pos[0] + 1, 0 + if self.value == unicode_bom: + # The bom doesn't have a length at the start of a Python file. + return self.start_pos + return self.start_pos[0], self.start_pos[1] + len(self.value) + + def create_spacing_part(self): + column = self.start_pos[1] - len(self.spacing) + return PrefixPart( + self.parent, 'spacing', self.spacing, + start_pos=(self.start_pos[0], column) + ) + + def __repr__(self): + return '%s(%s, %s, %s)' % ( + self.__class__.__name__, + self.type, + repr(self.value), + self.start_pos + ) + + def search_ancestor(self, *node_types): + node = self.parent + while node is not None: + if node.type in node_types: + return node + node = node.parent + return None + + +_comment = r'#[^\n\r\f]*' +_backslash = r'\\\r?\n|\\\r' +_newline = r'\r?\n|\r' +_form_feed = r'\f' +_only_spacing = '$' +_spacing = r'[ \t]*' +_bom = unicode_bom + +_regex = group( + _comment, _backslash, _newline, _form_feed, _only_spacing, _bom, + capture=True +) +_regex = re.compile(group(_spacing, capture=True) + _regex) + + +_types = { + '#': 'comment', + '\\': 'backslash', + '\f': 'formfeed', + '\n': 'newline', + '\r': 'newline', + unicode_bom: 'bom' +} + + +def split_prefix(leaf, start_pos): + line, column = start_pos + start = 0 + value = spacing = '' + bom = False + while start != len(leaf.prefix): + match = _regex.match(leaf.prefix, start) + spacing = match.group(1) + value = match.group(2) + if not value: + break + type_ = _types[value[0]] + yield PrefixPart( + leaf, type_, value, spacing, + start_pos=(line, column + start - int(bom) + len(spacing)) + ) + if type_ == 'bom': + bom = True + + start = match.end(0) + if value.endswith('\n') or value.endswith('\r'): + line += 1 + column = -start + + if value: + spacing = '' + yield PrefixPart( + leaf, 'spacing', spacing, + start_pos=(line, column + start) + ) diff --git a/myenv/lib/python3.10/site-packages/parso/python/token.py b/myenv/lib/python3.10/site-packages/parso/python/token.py new file mode 100644 index 000000000..9b6f4c7c1 --- /dev/null +++ 
b/myenv/lib/python3.10/site-packages/parso/python/token.py @@ -0,0 +1,31 @@ +from __future__ import absolute_import + +from enum import Enum + + +class TokenType: + name: str + contains_syntax: bool + + def __init__(self, name: str, contains_syntax: bool = False): + self.name = name + self.contains_syntax = contains_syntax + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, self.name) + + +class PythonTokenTypes(Enum): + STRING = TokenType('STRING') + NUMBER = TokenType('NUMBER') + NAME = TokenType('NAME', contains_syntax=True) + ERRORTOKEN = TokenType('ERRORTOKEN') + NEWLINE = TokenType('NEWLINE') + INDENT = TokenType('INDENT') + DEDENT = TokenType('DEDENT') + ERROR_DEDENT = TokenType('ERROR_DEDENT') + FSTRING_STRING = TokenType('FSTRING_STRING') + FSTRING_START = TokenType('FSTRING_START') + FSTRING_END = TokenType('FSTRING_END') + OP = TokenType('OP', contains_syntax=True) + ENDMARKER = TokenType('ENDMARKER') diff --git a/myenv/lib/python3.10/site-packages/parso/python/tokenize.py b/myenv/lib/python3.10/site-packages/parso/python/tokenize.py new file mode 100644 index 000000000..e3ffe440b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/python/tokenize.py @@ -0,0 +1,687 @@ +# -*- coding: utf-8 -*- +""" +This tokenizer has been copied from the ``tokenize.py`` standard library +tokenizer. The reason was simple: The standard library tokenizer fails +if the indentation is not right. To make it possible to do error recovery the + tokenizer needed to be rewritten. + +Basically this is a stripped down version of the standard library module, so +you can read the documentation there. Additionally we included some speed and +memory optimizations here. +""" +from __future__ import absolute_import + +import sys +import re +import itertools as _itertools +from codecs import BOM_UTF8 +from typing import NamedTuple, Tuple, Iterator, Iterable, List, Dict, \ + Pattern, Set + +from parso.python.token import PythonTokenTypes +from parso.utils import split_lines, PythonVersionInfo, parse_version_string + + +# Maximum code point of Unicode 6.0: 0x10ffff (1,114,111) +MAX_UNICODE = '\U0010ffff' + +STRING = PythonTokenTypes.STRING +NAME = PythonTokenTypes.NAME +NUMBER = PythonTokenTypes.NUMBER +OP = PythonTokenTypes.OP +NEWLINE = PythonTokenTypes.NEWLINE +INDENT = PythonTokenTypes.INDENT +DEDENT = PythonTokenTypes.DEDENT +ENDMARKER = PythonTokenTypes.ENDMARKER +ERRORTOKEN = PythonTokenTypes.ERRORTOKEN +ERROR_DEDENT = PythonTokenTypes.ERROR_DEDENT +FSTRING_START = PythonTokenTypes.FSTRING_START +FSTRING_STRING = PythonTokenTypes.FSTRING_STRING +FSTRING_END = PythonTokenTypes.FSTRING_END + + +class TokenCollection(NamedTuple): + pseudo_token: Pattern + single_quoted: Set[str] + triple_quoted: Set[str] + endpats: Dict[str, Pattern] + whitespace: Pattern + fstring_pattern_map: Dict[str, str] + always_break_tokens: Tuple[str] + + +BOM_UTF8_STRING = BOM_UTF8.decode('utf-8') + +_token_collection_cache: Dict[PythonVersionInfo, TokenCollection] = {} + + +def group(*choices, capture=False, **kwargs): + assert not kwargs + + start = '(' + if not capture: + start += '?:' + return start + '|'.join(choices) + ')' + + +def maybe(*choices): + return group(*choices) + '?' + + +# Return the empty string, plus all of the valid string prefixes. +def _all_string_prefixes(*, include_fstring=False, only_fstring=False): + def different_case_versions(prefix): + for s in _itertools.product(*[(c, c.upper()) for c in prefix]): + yield ''.join(s) + # The valid string prefixes. 
Only contain the lower case versions, + # and don't contain any permutations (include 'fr', but not + # 'rf'). The various permutations will be generated. + valid_string_prefixes = ['b', 'r', 'u', 'br'] + + result = {''} + if include_fstring: + f = ['f', 'fr'] + if only_fstring: + valid_string_prefixes = f + result = set() + else: + valid_string_prefixes += f + elif only_fstring: + return set() + + # if we add binary f-strings, add: ['fb', 'fbr'] + for prefix in valid_string_prefixes: + for t in _itertools.permutations(prefix): + # create a list with upper and lower versions of each + # character + result.update(different_case_versions(t)) + return result + + +def _compile(expr): + return re.compile(expr, re.UNICODE) + + +def _get_token_collection(version_info): + try: + return _token_collection_cache[tuple(version_info)] + except KeyError: + _token_collection_cache[tuple(version_info)] = result = \ + _create_token_collection(version_info) + return result + + +unicode_character_name = r'[A-Za-z0-9\-]+(?: [A-Za-z0-9\-]+)*' +fstring_string_single_line = _compile( + r'(?:\{\{|\}\}|\\N\{' + unicode_character_name + + r'\}|\\(?:\r\n?|\n)|\\[^\r\nN]|[^{}\r\n\\])+' +) +fstring_string_multi_line = _compile( + r'(?:\{\{|\}\}|\\N\{' + unicode_character_name + r'\}|\\[^N]|[^{}\\])+' +) +fstring_format_spec_single_line = _compile(r'(?:\\(?:\r\n?|\n)|[^{}\r\n])+') +fstring_format_spec_multi_line = _compile(r'[^{}]+') + + +def _create_token_collection(version_info): + # Note: we use unicode matching for names ("\w") but ascii matching for + # number literals. + Whitespace = r'[ \f\t]*' + whitespace = _compile(Whitespace) + Comment = r'#[^\r\n]*' + Name = '([A-Za-z_0-9\u0080-' + MAX_UNICODE + ']+)' + + Hexnumber = r'0[xX](?:_?[0-9a-fA-F])+' + Binnumber = r'0[bB](?:_?[01])+' + Octnumber = r'0[oO](?:_?[0-7])+' + Decnumber = r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)' + Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber) + Exponent = r'[eE][-+]?[0-9](?:_?[0-9])*' + Pointfloat = group(r'[0-9](?:_?[0-9])*\.(?:[0-9](?:_?[0-9])*)?', + r'\.[0-9](?:_?[0-9])*') + maybe(Exponent) + Expfloat = r'[0-9](?:_?[0-9])*' + Exponent + Floatnumber = group(Pointfloat, Expfloat) + Imagnumber = group(r'[0-9](?:_?[0-9])*[jJ]', Floatnumber + r'[jJ]') + Number = group(Imagnumber, Floatnumber, Intnumber) + + # Note that since _all_string_prefixes includes the empty string, + # StringPrefix can be the empty string (making it optional). + possible_prefixes = _all_string_prefixes() + StringPrefix = group(*possible_prefixes) + StringPrefixWithF = group(*_all_string_prefixes(include_fstring=True)) + fstring_prefixes = _all_string_prefixes(include_fstring=True, only_fstring=True) + FStringStart = group(*fstring_prefixes) + + # Tail end of ' string. + Single = r"(?:\\.|[^'\\])*'" + # Tail end of " string. + Double = r'(?:\\.|[^"\\])*"' + # Tail end of ''' string. + Single3 = r"(?:\\.|'(?!'')|[^'\\])*'''" + # Tail end of """ string. + Double3 = r'(?:\\.|"(?!"")|[^"\\])*"""' + Triple = group(StringPrefixWithF + "'''", StringPrefixWithF + '"""') + + # Because of leftmost-then-longest match semantics, be sure to put the + # longest operators first (e.g., if = came before ==, == would get + # recognized as two instances of =).
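The ordering requirement described in the comment above follows from how Python's `re` alternation works: the first alternative that matches wins, not the longest one. A quick self-contained demonstration, relevant to the `Operator` group defined next:

```python
import re

# re picks the first alternative that matches at a position, so '=' listed
# before '==' would split '==' into two '=' tokens; longest operators first.
print(re.match(r'=|==', '==').group(0))   # '='
print(re.match(r'==|=', '==').group(0))   # '=='
```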
+ Operator = group(r"\*\*=?", r">>=?", r"<<=?", + r"//=?", r"->", + r"[+\-*/%&@`|^!=<>]=?", + r"~") + + Bracket = '[][(){}]' + + special_args = [r'\.\.\.', r'\r\n?', r'\n', r'[;.,@]'] + if version_info >= (3, 8): + special_args.insert(0, ":=?") + else: + special_args.insert(0, ":") + Special = group(*special_args) + + Funny = group(Operator, Bracket, Special) + + # First (or only) line of ' or " string. + ContStr = group(StringPrefix + r"'[^\r\n'\\]*(?:\\.[^\r\n'\\]*)*" + + group("'", r'\\(?:\r\n?|\n)'), + StringPrefix + r'"[^\r\n"\\]*(?:\\.[^\r\n"\\]*)*' + + group('"', r'\\(?:\r\n?|\n)')) + pseudo_extra_pool = [Comment, Triple] + all_quotes = '"', "'", '"""', "'''" + if fstring_prefixes: + pseudo_extra_pool.append(FStringStart + group(*all_quotes)) + + PseudoExtras = group(r'\\(?:\r\n?|\n)|\Z', *pseudo_extra_pool) + PseudoToken = group(Whitespace, capture=True) + \ + group(PseudoExtras, Number, Funny, ContStr, Name, capture=True) + + # For a given string prefix plus quotes, endpats maps it to a regex + # to match the remainder of that string. _prefix can be empty, for + # a normal single or triple quoted string (with no prefix). + endpats = {} + for _prefix in possible_prefixes: + endpats[_prefix + "'"] = _compile(Single) + endpats[_prefix + '"'] = _compile(Double) + endpats[_prefix + "'''"] = _compile(Single3) + endpats[_prefix + '"""'] = _compile(Double3) + + # A set of all of the single and triple quoted string prefixes, + # including the opening quotes. + single_quoted = set() + triple_quoted = set() + fstring_pattern_map = {} + for t in possible_prefixes: + for quote in '"', "'": + single_quoted.add(t + quote) + + for quote in '"""', "'''": + triple_quoted.add(t + quote) + + for t in fstring_prefixes: + for quote in all_quotes: + fstring_pattern_map[t + quote] = quote + + ALWAYS_BREAK_TOKENS = (';', 'import', 'class', 'def', 'try', 'except', + 'finally', 'while', 'with', 'return', 'continue', + 'break', 'del', 'pass', 'global', 'assert', 'nonlocal') + pseudo_token_compiled = _compile(PseudoToken) + return TokenCollection( + pseudo_token_compiled, single_quoted, triple_quoted, endpats, + whitespace, fstring_pattern_map, set(ALWAYS_BREAK_TOKENS) + ) + + +class Token(NamedTuple): + type: PythonTokenTypes + string: str + start_pos: Tuple[int, int] + prefix: str + + @property + def end_pos(self) -> Tuple[int, int]: + lines = split_lines(self.string) + if len(lines) > 1: + return self.start_pos[0] + len(lines) - 1, 0 + else: + return self.start_pos[0], self.start_pos[1] + len(self.string) + + +class PythonToken(Token): + def __repr__(self): + return ('TokenInfo(type=%s, string=%r, start_pos=%r, prefix=%r)' % + self._replace(type=self.type.name)) + + +class FStringNode: + def __init__(self, quote): + self.quote = quote + self.parentheses_count = 0 + self.previous_lines = '' + self.last_string_start_pos = None + # In the syntax there can be multiple format_spec's nested: + # {x:{y:3}} + self.format_spec_count = 0 + + def open_parentheses(self, character): + self.parentheses_count += 1 + + def close_parentheses(self, character): + self.parentheses_count -= 1 + if self.parentheses_count == 0: + # No parentheses means that the format spec is also finished. 
+            self.format_spec_count = 0
+
+    def allow_multiline(self):
+        return len(self.quote) == 3
+
+    def is_in_expr(self):
+        return self.parentheses_count > self.format_spec_count
+
+    def is_in_format_spec(self):
+        return not self.is_in_expr() and self.format_spec_count
+
+
+def _close_fstring_if_necessary(fstring_stack, string, line_nr, column, additional_prefix):
+    for fstring_stack_index, node in enumerate(fstring_stack):
+        lstripped_string = string.lstrip()
+        len_lstrip = len(string) - len(lstripped_string)
+        if lstripped_string.startswith(node.quote):
+            token = PythonToken(
+                FSTRING_END,
+                node.quote,
+                (line_nr, column + len_lstrip),
+                prefix=additional_prefix+string[:len_lstrip],
+            )
+            additional_prefix = ''
+            assert not node.previous_lines
+            del fstring_stack[fstring_stack_index:]
+            return token, '', len(node.quote) + len_lstrip
+    return None, additional_prefix, 0
+
+
+def _find_fstring_string(endpats, fstring_stack, line, lnum, pos):
+    tos = fstring_stack[-1]
+    allow_multiline = tos.allow_multiline()
+    if tos.is_in_format_spec():
+        if allow_multiline:
+            regex = fstring_format_spec_multi_line
+        else:
+            regex = fstring_format_spec_single_line
+    else:
+        if allow_multiline:
+            regex = fstring_string_multi_line
+        else:
+            regex = fstring_string_single_line
+
+    match = regex.match(line, pos)
+    if match is None:
+        return tos.previous_lines, pos
+
+    if not tos.previous_lines:
+        tos.last_string_start_pos = (lnum, pos)
+
+    string = match.group(0)
+    for fstring_stack_node in fstring_stack:
+        end_match = endpats[fstring_stack_node.quote].match(string)
+        if end_match is not None:
+            string = end_match.group(0)[:-len(fstring_stack_node.quote)]
+
+    new_pos = pos
+    new_pos += len(string)
+    # even if allow_multiline is False, we still need to check for trailing
+    # newlines, because a single-line f-string can contain line continuations
+    if string.endswith('\n') or string.endswith('\r'):
+        tos.previous_lines += string
+        string = ''
+    else:
+        string = tos.previous_lines + string
+
+    return string, new_pos
+
+
+def tokenize(
+    code: str, *, version_info: PythonVersionInfo, start_pos: Tuple[int, int] = (1, 0)
+) -> Iterator[PythonToken]:
+    """Generate tokens from the source code (string)."""
+    lines = split_lines(code, keepends=True)
+    return tokenize_lines(lines, version_info=version_info, start_pos=start_pos)
+
+
+def _print_tokens(func):
+    """
+    A small helper function to help debug the tokenize_lines function.
+    """
+    def wrapper(*args, **kwargs):
+        for token in func(*args, **kwargs):
+            print(token)  # This print is intentional for debugging!
+            yield token
+
+    return wrapper
+
+
+# @_print_tokens
+def tokenize_lines(
+    lines: Iterable[str],
+    *,
+    version_info: PythonVersionInfo,
+    indents: List[int] = None,
+    start_pos: Tuple[int, int] = (1, 0),
+    is_first_token=True,
+) -> Iterator[PythonToken]:
+    """
+    A heavily modified Python standard library tokenizer.
+
+    In addition to the default information, this also yields the prefix of
+    each token. This idea comes from lib2to3. The prefix contains all
+    information that is irrelevant for the parser, like newlines in
+    parentheses or comments.
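+    For example, in ``1 + 2  # three`` the comment never becomes a token of
+    its own; it is carried as part of the prefix of the token that follows
+    it (here the NEWLINE at the end of the line).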
+ """ + def dedent_if_necessary(start): + while start < indents[-1]: + if start > indents[-2]: + yield PythonToken(ERROR_DEDENT, '', (lnum, start), '') + indents[-1] = start + break + indents.pop() + yield PythonToken(DEDENT, '', spos, '') + + pseudo_token, single_quoted, triple_quoted, endpats, whitespace, \ + fstring_pattern_map, always_break_tokens, = \ + _get_token_collection(version_info) + paren_level = 0 # count parentheses + if indents is None: + indents = [0] + max_ = 0 + numchars = '0123456789' + contstr = '' + contline: str + contstr_start: Tuple[int, int] + endprog: Pattern + # We start with a newline. This makes indent at the first position + # possible. It's not valid Python, but still better than an INDENT in the + # second line (and not in the first). This makes quite a few things in + # Jedi's fast parser possible. + new_line = True + prefix = '' # Should never be required, but here for safety + additional_prefix = '' + lnum = start_pos[0] - 1 + fstring_stack: List[FStringNode] = [] + for line in lines: # loop over lines in stream + lnum += 1 + pos = 0 + max_ = len(line) + if is_first_token: + if line.startswith(BOM_UTF8_STRING): + additional_prefix = BOM_UTF8_STRING + line = line[1:] + max_ = len(line) + + # Fake that the part before was already parsed. + line = '^' * start_pos[1] + line + pos = start_pos[1] + max_ += start_pos[1] + + is_first_token = False + + if contstr: # continued string + endmatch = endprog.match(line) # noqa: F821 + if endmatch: + pos = endmatch.end(0) + yield PythonToken( + STRING, contstr + line[:pos], + contstr_start, prefix) # noqa: F821 + contstr = '' + contline = '' + else: + contstr = contstr + line + contline = contline + line + continue + + while pos < max_: + if fstring_stack: + tos = fstring_stack[-1] + if not tos.is_in_expr(): + string, pos = _find_fstring_string(endpats, fstring_stack, line, lnum, pos) + if string: + yield PythonToken( + FSTRING_STRING, string, + tos.last_string_start_pos, + # Never has a prefix because it can start anywhere and + # include whitespace. + prefix='' + ) + tos.previous_lines = '' + continue + if pos == max_: + break + + rest = line[pos:] + fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary( + fstring_stack, + rest, + lnum, + pos, + additional_prefix, + ) + pos += quote_length + if fstring_end_token is not None: + yield fstring_end_token + continue + + # in an f-string, match until the end of the string + if fstring_stack: + string_line = line + for fstring_stack_node in fstring_stack: + quote = fstring_stack_node.quote + end_match = endpats[quote].match(line, pos) + if end_match is not None: + end_match_string = end_match.group(0) + if len(end_match_string) - len(quote) + pos < len(string_line): + string_line = line[:pos] + end_match_string[:-len(quote)] + pseudomatch = pseudo_token.match(string_line, pos) + else: + pseudomatch = pseudo_token.match(line, pos) + + if pseudomatch: + prefix = additional_prefix + pseudomatch.group(1) + additional_prefix = '' + start, pos = pseudomatch.span(2) + spos = (lnum, start) + token = pseudomatch.group(2) + if token == '': + assert prefix + additional_prefix = prefix + # This means that we have a line with whitespace/comments at + # the end, which just results in an endmarker. 
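+                    # (An empty match can only come from the \Z alternative
+                    # in PseudoExtras, i.e. we are at the end of the line.)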
+ break + initial = token[0] + else: + match = whitespace.match(line, pos) + initial = line[match.end()] + start = match.end() + spos = (lnum, start) + + if new_line and initial not in '\r\n#' and (initial != '\\' or pseudomatch is None): + new_line = False + if paren_level == 0 and not fstring_stack: + indent_start = start + if indent_start > indents[-1]: + yield PythonToken(INDENT, '', spos, '') + indents.append(indent_start) + yield from dedent_if_necessary(indent_start) + + if not pseudomatch: # scan for tokens + match = whitespace.match(line, pos) + if new_line and paren_level == 0 and not fstring_stack: + yield from dedent_if_necessary(match.end()) + pos = match.end() + new_line = False + yield PythonToken( + ERRORTOKEN, line[pos], (lnum, pos), + additional_prefix + match.group(0) + ) + additional_prefix = '' + pos += 1 + continue + + if (initial in numchars # ordinary number + or (initial == '.' and token != '.' and token != '...')): + yield PythonToken(NUMBER, token, spos, prefix) + elif pseudomatch.group(3) is not None: # ordinary name + if token in always_break_tokens and (fstring_stack or paren_level): + fstring_stack[:] = [] + paren_level = 0 + # We only want to dedent if the token is on a new line. + m = re.match(r'[ \f\t]*$', line[:start]) + if m is not None: + yield from dedent_if_necessary(m.end()) + if token.isidentifier(): + yield PythonToken(NAME, token, spos, prefix) + else: + yield from _split_illegal_unicode_name(token, spos, prefix) + elif initial in '\r\n': + if any(not f.allow_multiline() for f in fstring_stack): + fstring_stack.clear() + + if not new_line and paren_level == 0 and not fstring_stack: + yield PythonToken(NEWLINE, token, spos, prefix) + else: + additional_prefix = prefix + token + new_line = True + elif initial == '#': # Comments + assert not token.endswith("\n") and not token.endswith("\r") + if fstring_stack and fstring_stack[-1].is_in_expr(): + # `#` is not allowed in f-string expressions + yield PythonToken(ERRORTOKEN, initial, spos, prefix) + pos = start + 1 + else: + additional_prefix = prefix + token + elif token in triple_quoted: + endprog = endpats[token] + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + pos = endmatch.end(0) + token = line[start:pos] + yield PythonToken(STRING, token, spos, prefix) + else: + contstr_start = spos # multiple lines + contstr = line[start:] + contline = line + break + + # Check up to the first 3 chars of the token to see if + # they're in the single_quoted set. If so, they start + # a string. + # We're using the first 3, because we're looking for + # "rb'" (for example) at the start of the token. If + # we switch to longer prefixes, this needs to be + # adjusted. + # Note that initial == token[:1]. + # Also note that single quote checking must come after + # triple quote checking (above). + elif initial in single_quoted or \ + token[:2] in single_quoted or \ + token[:3] in single_quoted: + if token[-1] in '\r\n': # continued string + # This means that a single quoted string ends with a + # backslash and is continued. + contstr_start = lnum, start + endprog = (endpats.get(initial) or endpats.get(token[1]) + or endpats.get(token[2])) + contstr = line[start:] + contline = line + break + else: # ordinary string + yield PythonToken(STRING, token, spos, prefix) + elif token in fstring_pattern_map: # The start of an fstring. 
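+                # fstring_pattern_map maps the start token, e.g. `F'''`, to
+                # its bare quote `'''`; that quote is all FStringNode needs.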
+                fstring_stack.append(FStringNode(fstring_pattern_map[token]))
+                yield PythonToken(FSTRING_START, token, spos, prefix)
+            elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\\r'):  # continued stmt
+                additional_prefix += prefix + line[start:]
+                break
+            else:
+                if token in '([{':
+                    if fstring_stack:
+                        fstring_stack[-1].open_parentheses(token)
+                    else:
+                        paren_level += 1
+                elif token in ')]}':
+                    if fstring_stack:
+                        fstring_stack[-1].close_parentheses(token)
+                    else:
+                        if paren_level:
+                            paren_level -= 1
+                elif token.startswith(':') and fstring_stack \
+                        and fstring_stack[-1].parentheses_count \
+                        - fstring_stack[-1].format_spec_count == 1:
+                    # `:` and `:=` both count
+                    fstring_stack[-1].format_spec_count += 1
+                    token = ':'
+                    pos = start + 1
+
+                yield PythonToken(OP, token, spos, prefix)
+
+    if contstr:
+        yield PythonToken(ERRORTOKEN, contstr, contstr_start, prefix)
+        if contstr.endswith('\n') or contstr.endswith('\r'):
+            new_line = True
+
+    if fstring_stack:
+        tos = fstring_stack[-1]
+        if tos.previous_lines:
+            yield PythonToken(
+                FSTRING_STRING, tos.previous_lines,
+                tos.last_string_start_pos,
+                # Never has a prefix because it can start anywhere and
+                # include whitespace.
+                prefix=''
+            )
+
+    end_pos = lnum, max_
+    # As the last position we just take the maximally possible position. We
+    # remove -1 for the last new line.
+    for indent in indents[1:]:
+        indents.pop()
+        yield PythonToken(DEDENT, '', end_pos, '')
+    yield PythonToken(ENDMARKER, '', end_pos, additional_prefix)
+
+
+def _split_illegal_unicode_name(token, start_pos, prefix):
+    def create_token():
+        return PythonToken(ERRORTOKEN if is_illegal else NAME, found, pos, prefix)
+
+    found = ''
+    is_illegal = False
+    pos = start_pos
+    for i, char in enumerate(token):
+        if is_illegal:
+            if char.isidentifier():
+                yield create_token()
+                found = char
+                is_illegal = False
+                prefix = ''
+                pos = start_pos[0], start_pos[1] + i
+            else:
+                found += char
+        else:
+            new_found = found + char
+            if new_found.isidentifier():
+                found = new_found
+            else:
+                if found:
+                    yield create_token()
+                    prefix = ''
+                    pos = start_pos[0], start_pos[1] + i
+                found = char
+                is_illegal = True
+
+    if found:
+        yield create_token()
+
+
+if __name__ == "__main__":
+    path = sys.argv[1]
+    with open(path) as f:
+        code = f.read()
+
+    for token in tokenize(code, version_info=parse_version_string('3.10')):
+        print(token)
diff --git a/myenv/lib/python3.10/site-packages/parso/python/tree.py b/myenv/lib/python3.10/site-packages/parso/python/tree.py
new file mode 100644
index 000000000..ebb408703
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/python/tree.py
@@ -0,0 +1,1242 @@
+"""
+This is the syntax tree for Python 3 syntaxes. The classes represent
+syntax elements like functions and imports.
+
+All of the nodes can be traced back to the `Python grammar file
+<https://docs.python.org/3/reference/grammar.html>`_. If you want to know how
+a tree is structured, just analyse that file (for each Python version it's a
+bit different).
+
+There's a lot of logic here that makes it easier for Jedi (and other libraries)
+to deal with a Python syntax tree.
+
+By using :py:meth:`parso.tree.NodeOrLeaf.get_code` on a module, you can get
+back the 1-to-1 representation of the input given to the parser. This is
+important if you want to refactor a parser tree.
+
+>>> from parso import parse
+>>> parser = parse('import os')
+>>> module = parser.get_root_node()
+>>> module
+<Module: @1-1>
+
+Any subclasses of :class:`Scope`, including :class:`Module`, have an attribute
+:attr:`iter_imports <Scope.iter_imports>`:
+
+>>> list(module.iter_imports())
+[<ImportName: import os@1,0>]
+
+Changes to the Python Grammar
+-----------------------------
+
+A few things have changed when looking at Python grammar files:
+
+- :class:`Param` does not exist in Python grammar files. It is essentially a
+  part of a ``parameters`` node. |parso| splits it up to make it easier to
+  analyse parameters. However this just makes it easier to deal with the syntax
+  tree, it doesn't actually change the valid syntax.
+- A few nodes like `lambdef` and `lambdef_nocond` have been merged in the
+  syntax tree to make it easier to deal with them.
+
+Parser Tree Classes
+-------------------
+"""
+
+import re
+try:
+    from collections.abc import Mapping
+except ImportError:
+    from collections import Mapping
+from typing import Tuple
+
+from parso.tree import Node, BaseNode, Leaf, ErrorNode, ErrorLeaf, search_ancestor  # noqa
+from parso.python.prefix import split_prefix
+from parso.utils import split_lines
+
+_FLOW_CONTAINERS = set(['if_stmt', 'while_stmt', 'for_stmt', 'try_stmt',
+                        'with_stmt', 'async_stmt', 'suite'])
+_RETURN_STMT_CONTAINERS = set(['suite', 'simple_stmt']) | _FLOW_CONTAINERS
+
+_FUNC_CONTAINERS = set(
+    ['suite', 'simple_stmt', 'decorated', 'async_funcdef']
+) | _FLOW_CONTAINERS
+
+_GET_DEFINITION_TYPES = set([
+    'expr_stmt', 'sync_comp_for', 'with_stmt', 'for_stmt', 'import_name',
+    'import_from', 'param', 'del_stmt', 'namedexpr_test',
+])
+_IMPORTS = set(['import_name', 'import_from'])
+
+
+class DocstringMixin:
+    __slots__ = ()
+
+    def get_doc_node(self):
+        """
+        Returns the string leaf of a docstring. e.g. ``r'''foo'''``.
+        """
+        if self.type == 'file_input':
+            node = self.children[0]
+        elif self.type in ('funcdef', 'classdef'):
+            node = self.children[self.children.index(':') + 1]
+            if node.type == 'suite':  # Normally a suite
+                node = node.children[1]  # -> NEWLINE stmt
+        else:  # ExprStmt
+            simple_stmt = self.parent
+            c = simple_stmt.parent.children
+            index = c.index(simple_stmt)
+            if not index:
+                return None
+            node = c[index - 1]
+
+        if node.type == 'simple_stmt':
+            node = node.children[0]
+        if node.type == 'string':
+            return node
+        return None
+
+
+class PythonMixin:
+    """
+    Some Python specific utilities.
+    """
+    __slots__ = ()
+
+    def get_name_of_position(self, position):
+        """
+        Given a (line, column) tuple, returns a :py:class:`Name` or ``None`` if
+        there is no name at that position.
+        """
+        for c in self.children:
+            if isinstance(c, Leaf):
+                if c.type == 'name' and c.start_pos <= position <= c.end_pos:
+                    return c
+            else:
+                result = c.get_name_of_position(position)
+                if result is not None:
+                    return result
+        return None
+
+
+class PythonLeaf(PythonMixin, Leaf):
+    __slots__ = ()
+
+    def _split_prefix(self):
+        return split_prefix(self, self.get_start_pos_of_prefix())
+
+    def get_start_pos_of_prefix(self):
+        """
+        Basically calls :py:meth:`parso.tree.NodeOrLeaf.get_start_pos_of_prefix`.
+        """
+        # TODO it is really ugly that we have to override it. Maybe change
+        # indent error leafs somehow? No idea how, though.
+        previous_leaf = self.get_previous_leaf()
+        if previous_leaf is not None and previous_leaf.type == 'error_leaf' \
+                and previous_leaf.token_type in ('INDENT', 'DEDENT', 'ERROR_DEDENT'):
+            previous_leaf = previous_leaf.get_previous_leaf()
+
+        if previous_leaf is None:  # It's the first leaf.
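+            # The prefix of the very first leaf starts at the beginning of
+            # the document; it may span multiple lines, hence the line count
+            # below.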
+ lines = split_lines(self.prefix) + # + 1 is needed because split_lines always returns at least ['']. + return self.line - len(lines) + 1, 0 # It's the first leaf. + return previous_leaf.end_pos + + +class _LeafWithoutNewlines(PythonLeaf): + """ + Simply here to optimize performance. + """ + __slots__ = () + + @property + def end_pos(self) -> Tuple[int, int]: + return self.line, self.column + len(self.value) + + +# Python base classes +class PythonBaseNode(PythonMixin, BaseNode): + __slots__ = () + + +class PythonNode(PythonMixin, Node): + __slots__ = () + + +class PythonErrorNode(PythonMixin, ErrorNode): + __slots__ = () + + +class PythonErrorLeaf(ErrorLeaf, PythonLeaf): + __slots__ = () + + +class EndMarker(_LeafWithoutNewlines): + __slots__ = () + type = 'endmarker' + + def __repr__(self): + return "<%s: prefix=%s end_pos=%s>" % ( + type(self).__name__, repr(self.prefix), self.end_pos + ) + + +class Newline(PythonLeaf): + """Contains NEWLINE and ENDMARKER tokens.""" + __slots__ = () + type = 'newline' + + def __repr__(self): + return "<%s: %s>" % (type(self).__name__, repr(self.value)) + + +class Name(_LeafWithoutNewlines): + """ + A string. Sometimes it is important to know if the string belongs to a name + or not. + """ + type = 'name' + __slots__ = () + + def __repr__(self): + return "<%s: %s@%s,%s>" % (type(self).__name__, self.value, + self.line, self.column) + + def is_definition(self, include_setitem=False): + """ + Returns True if the name is being defined. + """ + return self.get_definition(include_setitem=include_setitem) is not None + + def get_definition(self, import_name_always=False, include_setitem=False): + """ + Returns None if there's no definition for a name. + + :param import_name_always: Specifies if an import name is always a + definition. Normally foo in `from foo import bar` is not a + definition. + """ + node = self.parent + type_ = node.type + + if type_ in ('funcdef', 'classdef'): + if self == node.name: + return node + return None + + if type_ == 'except_clause': + if self.get_previous_sibling() == 'as': + return node.parent # The try_stmt. + return None + + while node is not None: + if node.type == 'suite': + return None + if node.type in _GET_DEFINITION_TYPES: + if self in node.get_defined_names(include_setitem): + return node + if import_name_always and node.type in _IMPORTS: + return node + return None + node = node.parent + return None + + +class Literal(PythonLeaf): + __slots__ = () + + +class Number(Literal): + type = 'number' + __slots__ = () + + +class String(Literal): + type = 'string' + __slots__ = () + + @property + def string_prefix(self): + return re.match(r'\w*(?=[\'"])', self.value).group(0) + + def _get_payload(self): + match = re.search( + r'''('{3}|"{3}|'|")(.*)$''', + self.value, + flags=re.DOTALL + ) + return match.group(2)[:-len(match.group(1))] + + +class FStringString(PythonLeaf): + """ + f-strings contain f-string expressions and normal python strings. These are + the string parts of f-strings. + """ + type = 'fstring_string' + __slots__ = () + + +class FStringStart(PythonLeaf): + """ + f-strings contain f-string expressions and normal python strings. These are + the string parts of f-strings. + """ + type = 'fstring_start' + __slots__ = () + + +class FStringEnd(PythonLeaf): + """ + f-strings contain f-string expressions and normal python strings. These are + the string parts of f-strings. 
+ """ + type = 'fstring_end' + __slots__ = () + + +class _StringComparisonMixin: + def __eq__(self, other): + """ + Make comparisons with strings easy. + Improves the readability of the parser. + """ + if isinstance(other, str): + return self.value == other + + return self is other + + def __hash__(self): + return hash(self.value) + + +class Operator(_LeafWithoutNewlines, _StringComparisonMixin): + type = 'operator' + __slots__ = () + + +class Keyword(_LeafWithoutNewlines, _StringComparisonMixin): + type = 'keyword' + __slots__ = () + + +class Scope(PythonBaseNode, DocstringMixin): + """ + Super class for the parser tree, which represents the state of a python + text file. + A Scope is either a function, class or lambda. + """ + __slots__ = () + + def __init__(self, children): + super().__init__(children) + + def iter_funcdefs(self): + """ + Returns a generator of `funcdef` nodes. + """ + return self._search_in_scope('funcdef') + + def iter_classdefs(self): + """ + Returns a generator of `classdef` nodes. + """ + return self._search_in_scope('classdef') + + def iter_imports(self): + """ + Returns a generator of `import_name` and `import_from` nodes. + """ + return self._search_in_scope('import_name', 'import_from') + + def _search_in_scope(self, *names): + def scan(children): + for element in children: + if element.type in names: + yield element + if element.type in _FUNC_CONTAINERS: + yield from scan(element.children) + + return scan(self.children) + + def get_suite(self): + """ + Returns the part that is executed by the function. + """ + return self.children[-1] + + def __repr__(self): + try: + name = self.name.value + except AttributeError: + name = '' + + return "<%s: %s@%s-%s>" % (type(self).__name__, name, + self.start_pos[0], self.end_pos[0]) + + +class Module(Scope): + """ + The top scope, which is always a module. + Depending on the underlying parser this may be a full module or just a part + of a module. + """ + __slots__ = ('_used_names',) + type = 'file_input' + + def __init__(self, children): + super().__init__(children) + self._used_names = None + + def _iter_future_import_names(self): + """ + :return: A list of future import names. + :rtype: list of str + """ + # In Python it's not allowed to use future imports after the first + # actual (non-future) statement. However this is not a linter here, + # just return all future imports. If people want to scan for issues + # they should use the API. + for imp in self.iter_imports(): + if imp.type == 'import_from' and imp.level == 0: + for path in imp.get_paths(): + names = [name.value for name in path] + if len(names) == 2 and names[0] == '__future__': + yield names[1] + + def get_used_names(self): + """ + Returns all the :class:`Name` leafs that exist in this module. This + includes both definitions and references of names. + """ + if self._used_names is None: + # Don't directly use self._used_names to eliminate a lookup. + dct = {} + + def recurse(node): + try: + children = node.children + except AttributeError: + if node.type == 'name': + arr = dct.setdefault(node.value, []) + arr.append(node) + else: + for child in children: + recurse(child) + + recurse(self) + self._used_names = UsedNamesMapping(dct) + return self._used_names + + +class Decorator(PythonBaseNode): + type = 'decorator' + __slots__ = () + + +class ClassOrFunc(Scope): + __slots__ = () + + @property + def name(self): + """ + Returns the `Name` leaf that defines the function or class name. 
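+
+        For example, for ``def foo(): pass`` this is the leaf holding
+        ``foo``, i.e. ``children[1]``, directly after the keyword.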
+        """
+        return self.children[1]
+
+    def get_decorators(self):
+        """
+        :rtype: list of :class:`Decorator`
+        """
+        decorated = self.parent
+        if decorated.type == 'async_funcdef':
+            decorated = decorated.parent
+
+        if decorated.type == 'decorated':
+            if decorated.children[0].type == 'decorators':
+                return decorated.children[0].children
+            else:
+                return decorated.children[:1]
+        else:
+            return []
+
+
+class Class(ClassOrFunc):
+    """
+    Used to store the parsed contents of a python class.
+    """
+    type = 'classdef'
+    __slots__ = ()
+
+    def __init__(self, children):
+        super().__init__(children)
+
+    def get_super_arglist(self):
+        """
+        Returns the `arglist` node that defines the super classes. It returns
+        None if there are no arguments.
+        """
+        if self.children[2] != '(':  # Has no parentheses
+            return None
+        else:
+            if self.children[3] == ')':  # Empty parentheses
+                return None
+            else:
+                return self.children[3]
+
+
+def _create_params(parent, argslist_list):
+    """
+    `argslist_list` is a list that can contain an argslist as a first item, but
+    usually does not. It's basically the items between the parameter brackets
+    (which is at most one item).
+    This function modifies the parser structure. It generates `Param` objects
+    from the normal ast. Those param objects do not exist in a normal ast, but
+    make the evaluation of the ast tree so much easier.
+    You could also say that this function replaces the argslist node with a
+    list of Param objects.
+    """
+    try:
+        first = argslist_list[0]
+    except IndexError:
+        return []
+
+    if first.type in ('name', 'fpdef'):
+        return [Param([first], parent)]
+    elif first == '*':
+        return [first]
+    else:  # argslist is a `typedargslist` or a `varargslist`.
+        if first.type == 'tfpdef':
+            children = [first]
+        else:
+            children = first.children
+        new_children = []
+        start = 0
+        # Start with offset 1, because the end is higher.
+        for end, child in enumerate(children + [None], 1):
+            if child is None or child == ',':
+                param_children = children[start:end]
+                if param_children:  # Could as well be comma and then end.
+                    if param_children[0] == '*' \
+                            and (len(param_children) == 1
+                                 or param_children[1] == ',') \
+                            or param_children[0] == '/':
+                        for p in param_children:
+                            p.parent = parent
+                        new_children += param_children
+                    else:
+                        new_children.append(Param(param_children, parent))
+                    start = end
+        return new_children
+
+
+class Function(ClassOrFunc):
+    """
+    Used to store the parsed contents of a python function.
+
+    Children::
+
+        0. <Keyword: def>
+        1. <Name>
+        2. parameter list (including open-paren and close-paren <Operator>s)
+        3. or 5. <Operator: :>
+        4. or 6. Node() representing function body
+        3. -> (if annotation is also present)
+        4. annotation (if present)
+    """
+    type = 'funcdef'
+
+    def __init__(self, children):
+        super().__init__(children)
+        parameters = self.children[2]  # After `def foo`
+        parameters_children = parameters.children[1:-1]
+        # If the parameters list already has Param objects, keep it as is;
+        # otherwise, convert it to a list of Param objects.
+        if not any(isinstance(child, Param) for child in parameters_children):
+            parameters.children[1:-1] = _create_params(parameters, parameters_children)
+
+    def _get_param_nodes(self):
+        return self.children[2].children
+
+    def get_params(self):
+        """
+        Returns a list of `Param()`.
+        """
+        return [p for p in self._get_param_nodes() if p.type == 'param']
+
+    @property
+    def name(self):
+        return self.children[1]  # First token after `def`
+
+    def iter_yield_exprs(self):
+        """
+        Returns a generator of `yield_expr`.
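+
+        Nested functions, classes and lambdas are skipped, since any yields
+        inside them belong to those scopes rather than to this one.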
+        """
+        def scan(children):
+            for element in children:
+                if element.type in ('classdef', 'funcdef', 'lambdef'):
+                    continue
+
+                try:
+                    nested_children = element.children
+                except AttributeError:
+                    if element.value == 'yield':
+                        if element.parent.type == 'yield_expr':
+                            yield element.parent
+                        else:
+                            yield element
+                else:
+                    yield from scan(nested_children)
+
+        return scan(self.children)
+
+    def iter_return_stmts(self):
+        """
+        Returns a generator of `return_stmt`.
+        """
+        def scan(children):
+            for element in children:
+                if element.type == 'return_stmt' \
+                        or element.type == 'keyword' and element.value == 'return':
+                    yield element
+                if element.type in _RETURN_STMT_CONTAINERS:
+                    yield from scan(element.children)
+
+        return scan(self.children)
+
+    def iter_raise_stmts(self):
+        """
+        Returns a generator of `raise_stmt`. Includes raise statements inside
+        try-except blocks.
+        """
+        def scan(children):
+            for element in children:
+                if element.type == 'raise_stmt' \
+                        or element.type == 'keyword' and element.value == 'raise':
+                    yield element
+                if element.type in _RETURN_STMT_CONTAINERS:
+                    yield from scan(element.children)
+
+        return scan(self.children)
+
+    def is_generator(self):
+        """
+        :return bool: Checks if a function is a generator or not.
+        """
+        return next(self.iter_yield_exprs(), None) is not None
+
+    @property
+    def annotation(self):
+        """
+        Returns the test node after `->` or `None` if there is no annotation.
+        """
+        try:
+            if self.children[3] == "->":
+                return self.children[4]
+            assert self.children[3] == ":"
+            return None
+        except IndexError:
+            return None
+
+
+class Lambda(Function):
+    """
+    Lambdas are basically trimmed functions, so give it the same interface.
+
+    Children::
+
+         0. <Keyword: lambda>
+         *. <Param x> for each argument x
+        -2. <Operator: :>
+        -1. Node() representing body
+    """
+    type = 'lambdef'
+    __slots__ = ()
+
+    def __init__(self, children):
+        # We don't want to call the Function constructor, call its parent.
+        super(Function, self).__init__(children)
+        # Everything between `lambda` and the `:` operator is a parameter.
+        parameters_children = self.children[1:-2]
+        # If the children list already has Param objects, keep it as is;
+        # otherwise, convert it to a list of Param objects.
+        if not any(isinstance(child, Param) for child in parameters_children):
+            self.children[1:-2] = _create_params(self, parameters_children)
+
+    @property
+    def name(self):
+        """
+        Raises an AttributeError. Lambdas don't have a defined name.
+        """
+        raise AttributeError("lambda is not named.")
+
+    def _get_param_nodes(self):
+        return self.children[1:-2]
+
+    @property
+    def annotation(self):
+        """
+        Returns `None`, lambdas don't have annotations.
+        """
+        return None
+
+    def __repr__(self):
+        return "<%s@%s>" % (self.__class__.__name__, self.start_pos)
+
+
+class Flow(PythonBaseNode):
+    __slots__ = ()
+
+
+class IfStmt(Flow):
+    type = 'if_stmt'
+    __slots__ = ()
+
+    def get_test_nodes(self):
+        """
+        E.g. returns all the `test` nodes that are named as x, below:
+
+            if x:
+                pass
+            elif x:
+                pass
+        """
+        for i, c in enumerate(self.children):
+            if c in ('elif', 'if'):
+                yield self.children[i + 1]
+
+    def get_corresponding_test_node(self, node):
+        """
+        Searches for the branch in which the node is and returns the
+        corresponding test node (see function above). However if the node is in
+        the test node itself and not in the suite return None.
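+
+        For example, given ``if x > 0: y``, a node inside ``y`` returns the
+        ``x > 0`` test node, while a node inside ``x > 0`` itself returns
+        None.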
+        """
+        start_pos = node.start_pos
+        for check_node in reversed(list(self.get_test_nodes())):
+            if check_node.start_pos < start_pos:
+                if start_pos < check_node.end_pos:
+                    return None
+                    # In this case the node is within the check_node itself,
+                    # not in the suite
+                else:
+                    return check_node
+
+    def is_node_after_else(self, node):
+        """
+        Checks if a node is defined after `else`.
+        """
+        for c in self.children:
+            if c == 'else':
+                if node.start_pos > c.start_pos:
+                    return True
+        else:
+            return False
+
+
+class WhileStmt(Flow):
+    type = 'while_stmt'
+    __slots__ = ()
+
+
+class ForStmt(Flow):
+    type = 'for_stmt'
+    __slots__ = ()
+
+    def get_testlist(self):
+        """
+        Returns the input node ``y`` from: ``for x in y:``.
+        """
+        return self.children[3]
+
+    def get_defined_names(self, include_setitem=False):
+        return _defined_names(self.children[1], include_setitem)
+
+
+class TryStmt(Flow):
+    type = 'try_stmt'
+    __slots__ = ()
+
+    def get_except_clause_tests(self):
+        """
+        Returns the ``test`` nodes found in ``except_clause`` nodes.
+        Returns ``[None]`` for except clauses without an exception given.
+        """
+        for node in self.children:
+            if node.type == 'except_clause':
+                yield node.children[1]
+            elif node == 'except':
+                yield None
+
+
+class WithStmt(Flow):
+    type = 'with_stmt'
+    __slots__ = ()
+
+    def get_defined_names(self, include_setitem=False):
+        """
+        Returns a list of `Name` that the with statement defines. The
+        defined names are set after `as`.
+        """
+        names = []
+        for with_item in self.children[1:-2:2]:
+            # Check with items for 'as' names.
+            if with_item.type == 'with_item':
+                names += _defined_names(with_item.children[2], include_setitem)
+        return names
+
+    def get_test_node_from_name(self, name):
+        node = name.search_ancestor("with_item")
+        if node is None:
+            raise ValueError('The name is not actually part of a with statement.')
+        return node.children[0]
+
+
+class Import(PythonBaseNode):
+    __slots__ = ()
+
+    def get_path_for_name(self, name):
+        """
+        The path is the list of names that leads to the searched name.
+
+        :return list of Name:
+        """
+        try:
+            # The name may be an alias. If it is, just map it back to the name.
+            name = self._aliases()[name]
+        except KeyError:
+            pass
+
+        for path in self.get_paths():
+            if name in path:
+                return path[:path.index(name) + 1]
+        raise ValueError('Name should be defined in the import itself')
+
+    def is_nested(self):
+        return False  # By default, sub classes may overwrite this behavior
+
+    def is_star_import(self):
+        return self.children[-1] == '*'
+
+
+class ImportFrom(Import):
+    type = 'import_from'
+    __slots__ = ()
+
+    def get_defined_names(self, include_setitem=False):
+        """
+        Returns a list of `Name` that the import defines. The defined names
+        are set after `import` or, in case an alias - `as` - is present, that
+        name is returned.
+        """
+        return [alias or name for name, alias in self._as_name_tuples()]
+
+    def _aliases(self):
+        """Mapping from alias to its corresponding name."""
+        return dict((alias, name) for name, alias in self._as_name_tuples()
+                    if alias is not None)
+
+    def get_from_names(self):
+        for n in self.children[1:]:
+            if n not in ('.', '...'):
+                break
+        if n.type == 'dotted_name':  # from x.y import
+            return n.children[::2]
+        elif n == 'import':  # from . import
+            return []
+        else:  # from x import
+            return [n]
+
+    @property
+    def level(self):
+        """The level parameter of ``__import__``."""
+        level = 0
+        for n in self.children[1:]:
+            if n in ('.', '...'):
+                level += len(n.value)
+            else:
+                break
+        return level
+
+    def _as_name_tuples(self):
+        last = self.children[-1]
+        if last == ')':
+            last = self.children[-2]
+        elif last == '*':
+            return  # No names defined directly.
+
+        if last.type == 'import_as_names':
+            as_names = last.children[::2]
+        else:
+            as_names = [last]
+        for as_name in as_names:
+            if as_name.type == 'name':
+                yield as_name, None
+            else:
+                yield as_name.children[::2]  # yields x, y -> ``x as y``
+
+    def get_paths(self):
+        """
+        The import paths defined in an import statement. Typically an array
+        like this: ``[<Name: datetime>, <Name: date>]``.
+
+        :return list of list of Name:
+        """
+        dotted = self.get_from_names()
+
+        if self.children[-1] == '*':
+            return [dotted]
+        return [dotted + [name] for name, alias in self._as_name_tuples()]
+
+
+class ImportName(Import):
+    """For ``import_name`` nodes. Covers normal imports without ``from``."""
+    type = 'import_name'
+    __slots__ = ()
+
+    def get_defined_names(self, include_setitem=False):
+        """
+        Returns a list of `Name` that the import defines. The defined name
+        is always the first name after `import` or, in case an alias - `as` -
+        is present, that name is returned.
+        """
+        return [alias or path[0] for path, alias in self._dotted_as_names()]
+
+    @property
+    def level(self):
+        """The level parameter of ``__import__``."""
+        return 0  # Obviously 0 for imports without from.
+
+    def get_paths(self):
+        return [path for path, alias in self._dotted_as_names()]
+
+    def _dotted_as_names(self):
+        """Generator of (list(path), alias) where alias may be None."""
+        dotted_as_names = self.children[1]
+        if dotted_as_names.type == 'dotted_as_names':
+            as_names = dotted_as_names.children[::2]
+        else:
+            as_names = [dotted_as_names]
+
+        for as_name in as_names:
+            if as_name.type == 'dotted_as_name':
+                alias = as_name.children[2]
+                as_name = as_name.children[0]
+            else:
+                alias = None
+            if as_name.type == 'name':
+                yield [as_name], alias
+            else:
+                # dotted_names
+                yield as_name.children[::2], alias
+
+    def is_nested(self):
+        """
+        This checks for the special case of nested imports, without aliases and
+        from statement::
+
+            import foo.bar
+        """
+        return bool([1 for path, alias in self._dotted_as_names()
+                     if alias is None and len(path) > 1])
+
+    def _aliases(self):
+        """
+        :return dict: A mapping from alias to its corresponding name.
+        """
+        return dict((alias, path[-1]) for path, alias in self._dotted_as_names()
+                    if alias is not None)
+
+
+class KeywordStatement(PythonBaseNode):
+    """
+    For the following statements: `assert`, `del`, `global`, `nonlocal`,
+    `raise`, `return`, `yield`.
+
+    `pass`, `continue` and `break` are not in there, because they are just
+    simple keywords and the parser reduces it to a keyword.
+    """
+    __slots__ = ()
+
+    @property
+    def type(self):
+        """
+        Keyword statements start with the keyword and end with `_stmt`. You can
+        crosscheck this with the Python grammar.
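+
+        For example, an ``assert`` statement has type ``assert_stmt`` and a
+        ``del`` statement has type ``del_stmt``.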
+ """ + return '%s_stmt' % self.keyword + + @property + def keyword(self): + return self.children[0].value + + def get_defined_names(self, include_setitem=False): + keyword = self.keyword + if keyword == 'del': + return _defined_names(self.children[1], include_setitem) + if keyword in ('global', 'nonlocal'): + return self.children[1::2] + return [] + + +class AssertStmt(KeywordStatement): + __slots__ = () + + @property + def assertion(self): + return self.children[1] + + +class GlobalStmt(KeywordStatement): + __slots__ = () + + def get_global_names(self): + return self.children[1::2] + + +class ReturnStmt(KeywordStatement): + __slots__ = () + + +class YieldExpr(PythonBaseNode): + type = 'yield_expr' + __slots__ = () + + +def _defined_names(current, include_setitem): + """ + A helper function to find the defined names in statements, for loops and + list comprehensions. + """ + names = [] + if current.type in ('testlist_star_expr', 'testlist_comp', 'exprlist', 'testlist'): + for child in current.children[::2]: + names += _defined_names(child, include_setitem) + elif current.type in ('atom', 'star_expr'): + names += _defined_names(current.children[1], include_setitem) + elif current.type in ('power', 'atom_expr'): + if current.children[-2] != '**': # Just if there's no operation + trailer = current.children[-1] + if trailer.children[0] == '.': + names.append(trailer.children[1]) + elif trailer.children[0] == '[' and include_setitem: + for node in current.children[-2::-1]: + if node.type == 'trailer': + names.append(node.children[1]) + break + if node.type == 'name': + names.append(node) + break + else: + names.append(current) + return names + + +class ExprStmt(PythonBaseNode, DocstringMixin): + type = 'expr_stmt' + __slots__ = () + + def get_defined_names(self, include_setitem=False): + """ + Returns a list of `Name` defined before the `=` sign. + """ + names = [] + if self.children[1].type == 'annassign': + names = _defined_names(self.children[0], include_setitem) + return [ + name + for i in range(0, len(self.children) - 2, 2) + if '=' in self.children[i + 1].value + for name in _defined_names(self.children[i], include_setitem) + ] + names + + def get_rhs(self): + """Returns the right-hand-side of the equals.""" + node = self.children[-1] + if node.type == 'annassign': + if len(node.children) == 4: + node = node.children[3] + else: + node = node.children[1] + return node + + def yield_operators(self): + """ + Returns a generator of `+=`, `=`, etc. or None if there is no operation. + """ + first = self.children[1] + if first.type == 'annassign': + if len(first.children) <= 2: + return # No operator is available, it's just PEP 484. + + first = first.children[2] + yield first + + yield from self.children[3::2] + + +class NamedExpr(PythonBaseNode): + type = 'namedexpr_test' + + def get_defined_names(self, include_setitem=False): + return _defined_names(self.children[0], include_setitem) + + +class Param(PythonBaseNode): + """ + It's a helper class that makes business logic with params much easier. The + Python grammar defines no ``param`` node. It defines it in a different way + that is not really suited to working with parameters. + """ + type = 'param' + + def __init__(self, children, parent=None): + super().__init__(children) + self.parent = parent + + @property + def star_count(self): + """ + Is `0` in case of `foo`, `1` in case of `*foo` or `2` in case of + `**foo`. 
+        """
+        first = self.children[0]
+        if first in ('*', '**'):
+            return len(first.value)
+        return 0
+
+    @property
+    def default(self):
+        """
+        The default is the test node that appears after the `=`. Is `None` in
+        case no default is present.
+        """
+        has_comma = self.children[-1] == ','
+        try:
+            if self.children[-2 - int(has_comma)] == '=':
+                return self.children[-1 - int(has_comma)]
+        except IndexError:
+            return None
+
+    @property
+    def annotation(self):
+        """
+        The annotation is the test node that appears after `:`. Is `None` in
+        case no annotation is present.
+        """
+        tfpdef = self._tfpdef()
+        if tfpdef.type == 'tfpdef':
+            assert tfpdef.children[1] == ":"
+            assert len(tfpdef.children) == 3
+            annotation = tfpdef.children[2]
+            return annotation
+        else:
+            return None
+
+    def _tfpdef(self):
+        """
+        tfpdef: see e.g. grammar36.txt.
+        """
+        offset = int(self.children[0] in ('*', '**'))
+        return self.children[offset]
+
+    @property
+    def name(self):
+        """
+        The `Name` leaf of the param.
+        """
+        if self._tfpdef().type == 'tfpdef':
+            return self._tfpdef().children[0]
+        else:
+            return self._tfpdef()
+
+    def get_defined_names(self, include_setitem=False):
+        return [self.name]
+
+    @property
+    def position_index(self):
+        """
+        Property for the positional index of a parameter.
+        """
+        index = self.parent.children.index(self)
+        try:
+            keyword_only_index = self.parent.children.index('*')
+            if index > keyword_only_index:
+                # Skip the ` *, `
+                index -= 2
+        except ValueError:
+            pass
+        try:
+            keyword_only_index = self.parent.children.index('/')
+            if index > keyword_only_index:
+                # Skip the ` /, `
+                index -= 2
+        except ValueError:
+            pass
+        return index - 1
+
+    def get_parent_function(self):
+        """
+        Returns the function/lambda of a parameter.
+        """
+        return self.search_ancestor('funcdef', 'lambdef')
+
+    def get_code(self, include_prefix=True, include_comma=True):
+        """
+        Like all the other get_code functions, but includes the param
+        `include_comma`.
+
+        :param include_comma bool: If enabled includes the comma in the string output.
+        """
+        if include_comma:
+            return super().get_code(include_prefix)
+
+        children = self.children
+        if children[-1] == ',':
+            children = children[:-1]
+        return self._get_code_for_children(
+            children,
+            include_prefix=include_prefix
+        )
+
+    def __repr__(self):
+        default = '' if self.default is None else '=%s' % self.default.get_code()
+        return '<%s: %s>' % (type(self).__name__, str(self._tfpdef()) + default)
+
+
+class SyncCompFor(PythonBaseNode):
+    type = 'sync_comp_for'
+    __slots__ = ()
+
+    def get_defined_names(self, include_setitem=False):
+        """
+        Returns a list of `Name` that the comprehension defines.
+        """
+        # allow async for
+        return _defined_names(self.children[1], include_setitem)
+
+
+# This is simply here so an older Jedi version can work with this new parso
+# version. Can be deleted in the next release.
+CompFor = SyncCompFor
+
+
+class UsedNamesMapping(Mapping):
+    """
+    This class exists for the sole purpose of creating an immutable dict.
+    """
+    def __init__(self, dct):
+        self._dict = dct
+
+    def __getitem__(self, key):
+        return self._dict[key]
+
+    def __len__(self):
+        return len(self._dict)
+
+    def __iter__(self):
+        return iter(self._dict)
+
+    def __hash__(self):
+        return id(self)
+
+    def __eq__(self, other):
+        # Comparing these dicts does not make sense.
+ return self is other diff --git a/myenv/lib/python3.10/site-packages/parso/tree.py b/myenv/lib/python3.10/site-packages/parso/tree.py new file mode 100644 index 000000000..e5298711e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/parso/tree.py @@ -0,0 +1,488 @@ +from abc import abstractmethod, abstractproperty +from typing import List, Optional, Tuple, Union + +from parso.utils import split_lines + + +def search_ancestor(node: 'NodeOrLeaf', *node_types: str) -> 'Optional[BaseNode]': + """ + Recursively looks at the parents of a node and returns the first found node + that matches ``node_types``. Returns ``None`` if no matching node is found. + + This function is deprecated, use :meth:`NodeOrLeaf.search_ancestor` instead. + + :param node: The ancestors of this node will be checked. + :param node_types: type names that are searched for. + """ + n = node.parent + while n is not None: + if n.type in node_types: + return n + n = n.parent + return None + + +class NodeOrLeaf: + """ + The base class for nodes and leaves. + """ + __slots__ = ('parent',) + type: str + ''' + The type is a string that typically matches the types of the grammar file. + ''' + parent: 'Optional[BaseNode]' + ''' + The parent :class:`BaseNode` of this node or leaf. + None if this is the root node. + ''' + + def get_root_node(self): + """ + Returns the root node of a parser tree. The returned node doesn't have + a parent node like all the other nodes/leaves. + """ + scope = self + while scope.parent is not None: + scope = scope.parent + return scope + + def get_next_sibling(self): + """ + Returns the node immediately following this node in this parent's + children list. If this node does not have a next sibling, it is None + """ + parent = self.parent + if parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(parent.children): + if child is self: + try: + return self.parent.children[i + 1] + except IndexError: + return None + + def get_previous_sibling(self): + """ + Returns the node immediately preceding this node in this parent's + children list. If this node does not have a previous sibling, it is + None. + """ + parent = self.parent + if parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(parent.children): + if child is self: + if i == 0: + return None + return self.parent.children[i - 1] + + def get_previous_leaf(self): + """ + Returns the previous leaf in the parser tree. + Returns `None` if this is the first element in the parser tree. + """ + if self.parent is None: + return None + + node = self + while True: + c = node.parent.children + i = c.index(node) + if i == 0: + node = node.parent + if node.parent is None: + return None + else: + node = c[i - 1] + break + + while True: + try: + node = node.children[-1] + except AttributeError: # A Leaf doesn't have children. + return node + + def get_next_leaf(self): + """ + Returns the next leaf in the parser tree. + Returns None if this is the last element in the parser tree. + """ + if self.parent is None: + return None + + node = self + while True: + c = node.parent.children + i = c.index(node) + if i == len(c) - 1: + node = node.parent + if node.parent is None: + return None + else: + node = c[i + 1] + break + + while True: + try: + node = node.children[0] + except AttributeError: # A Leaf doesn't have children. 
+ return node + + @abstractproperty + def start_pos(self) -> Tuple[int, int]: + """ + Returns the starting position of the prefix as a tuple, e.g. `(3, 4)`. + + :return tuple of int: (line, column) + """ + + @abstractproperty + def end_pos(self) -> Tuple[int, int]: + """ + Returns the end position of the prefix as a tuple, e.g. `(3, 4)`. + + :return tuple of int: (line, column) + """ + + @abstractmethod + def get_start_pos_of_prefix(self): + """ + Returns the start_pos of the prefix. This means basically it returns + the end_pos of the last prefix. The `get_start_pos_of_prefix()` of the + prefix `+` in `2 + 1` would be `(1, 1)`, while the start_pos is + `(1, 2)`. + + :return tuple of int: (line, column) + """ + + @abstractmethod + def get_first_leaf(self): + """ + Returns the first leaf of a node or itself if this is a leaf. + """ + + @abstractmethod + def get_last_leaf(self): + """ + Returns the last leaf of a node or itself if this is a leaf. + """ + + @abstractmethod + def get_code(self, include_prefix=True): + """ + Returns the code that was the input for the parser for this node. + + :param include_prefix: Removes the prefix (whitespace and comments) of + e.g. a statement. + """ + + def search_ancestor(self, *node_types: str) -> 'Optional[BaseNode]': + """ + Recursively looks at the parents of this node or leaf and returns the + first found node that matches ``node_types``. Returns ``None`` if no + matching node is found. + + :param node_types: type names that are searched for. + """ + node = self.parent + while node is not None: + if node.type in node_types: + return node + node = node.parent + return None + + def dump(self, *, indent: Optional[Union[int, str]] = 4) -> str: + """ + Returns a formatted dump of the parser tree rooted at this node or leaf. This is + mainly useful for debugging purposes. + + The ``indent`` parameter is interpreted in a similar way as :py:func:`ast.dump`. + If ``indent`` is a non-negative integer or string, then the tree will be + pretty-printed with that indent level. An indent level of 0, negative, or ``""`` + will only insert newlines. ``None`` selects the single line representation. + Using a positive integer indent indents that many spaces per level. If + ``indent`` is a string (such as ``"\\t"``), that string is used to indent each + level. + + :param indent: Indentation style as described above. The default indentation is + 4 spaces, which yields a pretty-printed dump. 
+
+        >>> import parso
+        >>> print(parso.parse("lambda x, y: x + y").dump())
+        Module([
+            Lambda([
+                Keyword('lambda', (1, 0)),
+                Param([
+                    Name('x', (1, 7), prefix=' '),
+                    Operator(',', (1, 8)),
+                ]),
+                Param([
+                    Name('y', (1, 10), prefix=' '),
+                ]),
+                Operator(':', (1, 11)),
+                PythonNode('arith_expr', [
+                    Name('x', (1, 13), prefix=' '),
+                    Operator('+', (1, 15), prefix=' '),
+                    Name('y', (1, 17), prefix=' '),
+                ]),
+            ]),
+            EndMarker('', (1, 18)),
+        ])
+        """
+        if indent is None:
+            newline = False
+            indent_string = ''
+        elif isinstance(indent, int):
+            newline = True
+            indent_string = ' ' * indent
+        elif isinstance(indent, str):
+            newline = True
+            indent_string = indent
+        else:
+            raise TypeError(f"expect 'indent' to be int, str or None, got {indent!r}")
+
+        def _format_dump(node: NodeOrLeaf, indent: str = '', top_level: bool = True) -> str:
+            result = ''
+            node_type = type(node).__name__
+            if isinstance(node, Leaf):
+                result += f'{indent}{node_type}('
+                if isinstance(node, ErrorLeaf):
+                    result += f'{node.token_type!r}, '
+                elif isinstance(node, TypedLeaf):
+                    result += f'{node.type!r}, '
+                result += f'{node.value!r}, {node.start_pos!r}'
+                if node.prefix:
+                    result += f', prefix={node.prefix!r}'
+                result += ')'
+            elif isinstance(node, BaseNode):
+                result += f'{indent}{node_type}('
+                if isinstance(node, Node):
+                    result += f'{node.type!r}, '
+                result += '['
+                if newline:
+                    result += '\n'
+                for child in node.children:
+                    result += _format_dump(child, indent=indent + indent_string, top_level=False)
+                result += f'{indent}])'
+            else:  # pragma: no cover
+                # We shouldn't ever reach here, unless:
+                # - `NodeOrLeaf` is incorrectly subclassed elsewhere
+                # - or a node's children list contains invalid nodes or leafs
+                # Both are unexpected internal errors.
+                raise TypeError(f'unsupported node encountered: {node!r}')
+            if not top_level:
+                if newline:
+                    result += ',\n'
+                else:
+                    result += ', '
+            return result
+
+        return _format_dump(self)
+
+
+class Leaf(NodeOrLeaf):
+    '''
+    Leafs are basically tokens with a better API. Leafs exactly know where they
+    were defined and what text precedes them.
+    '''
+    __slots__ = ('value', 'line', 'column', 'prefix')
+    prefix: str
+
+    def __init__(self, value: str, start_pos: Tuple[int, int], prefix: str = '') -> None:
+        self.value = value
+        '''
+        :py:func:`str` The value of the current token.
+        '''
+        self.start_pos = start_pos
+        self.prefix = prefix
+        '''
+        :py:func:`str` Typically a mixture of whitespace and comments. Stuff
+        that is syntactically irrelevant for the syntax tree.
+        '''
+        self.parent: Optional[BaseNode] = None
+        '''
+        The parent :class:`BaseNode` of this leaf.
+        '''
+
+    @property
+    def start_pos(self) -> Tuple[int, int]:
+        return self.line, self.column
+
+    @start_pos.setter
+    def start_pos(self, value: Tuple[int, int]) -> None:
+        self.line = value[0]
+        self.column = value[1]
+
+    def get_start_pos_of_prefix(self):
+        previous_leaf = self.get_previous_leaf()
+        if previous_leaf is None:
+            lines = split_lines(self.prefix)
+            # + 1 is needed because split_lines always returns at least [''].
+            return self.line - len(lines) + 1, 0  # It's the first leaf.
+ return previous_leaf.end_pos + + def get_first_leaf(self): + return self + + def get_last_leaf(self): + return self + + def get_code(self, include_prefix=True): + if include_prefix: + return self.prefix + self.value + else: + return self.value + + @property + def end_pos(self) -> Tuple[int, int]: + lines = split_lines(self.value) + end_pos_line = self.line + len(lines) - 1 + # Check for multiline token + if self.line == end_pos_line: + end_pos_column = self.column + len(lines[-1]) + else: + end_pos_column = len(lines[-1]) + return end_pos_line, end_pos_column + + def __repr__(self): + value = self.value + if not value: + value = self.type + return "<%s: %s>" % (type(self).__name__, value) + + +class TypedLeaf(Leaf): + __slots__ = ('type',) + + def __init__(self, type, value, start_pos, prefix=''): + super().__init__(value, start_pos, prefix) + self.type = type + + +class BaseNode(NodeOrLeaf): + """ + The super class for all nodes. + A node has children, a type and possibly a parent node. + """ + __slots__ = ('children',) + + def __init__(self, children: List[NodeOrLeaf]) -> None: + self.children = children + """ + A list of :class:`NodeOrLeaf` child nodes. + """ + self.parent: Optional[BaseNode] = None + ''' + The parent :class:`BaseNode` of this node. + None if this is the root node. + ''' + for child in children: + child.parent = self + + @property + def start_pos(self) -> Tuple[int, int]: + return self.children[0].start_pos + + def get_start_pos_of_prefix(self): + return self.children[0].get_start_pos_of_prefix() + + @property + def end_pos(self) -> Tuple[int, int]: + return self.children[-1].end_pos + + def _get_code_for_children(self, children, include_prefix): + if include_prefix: + return "".join(c.get_code() for c in children) + else: + first = children[0].get_code(include_prefix=False) + return first + "".join(c.get_code() for c in children[1:]) + + def get_code(self, include_prefix=True): + return self._get_code_for_children(self.children, include_prefix) + + def get_leaf_for_position(self, position, include_prefixes=False): + """ + Get the :py:class:`parso.tree.Leaf` at ``position`` + + :param tuple position: A position tuple, row, column. Rows start from 1 + :param bool include_prefixes: If ``False``, ``None`` will be returned if ``position`` falls + on whitespace or comments before a leaf + :return: :py:class:`parso.tree.Leaf` at ``position``, or ``None`` + """ + def binary_search(lower, upper): + if lower == upper: + element = self.children[lower] + if not include_prefixes and position < element.start_pos: + # We're on a prefix. 
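+                    # (e.g. the position points into whitespace or a comment
+                    # in front of the leaf while include_prefixes is False)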
+                    return None
+                # In case we have prefixes, a leaf always matches
+                try:
+                    return element.get_leaf_for_position(position, include_prefixes)
+                except AttributeError:
+                    return element
+
+            index = int((lower + upper) / 2)
+            element = self.children[index]
+            if position <= element.end_pos:
+                return binary_search(lower, index)
+            else:
+                return binary_search(index + 1, upper)
+
+        if not ((1, 0) <= position <= self.children[-1].end_pos):
+            raise ValueError('Please provide a position that exists within this node.')
+        return binary_search(0, len(self.children) - 1)
+
+    def get_first_leaf(self):
+        return self.children[0].get_first_leaf()
+
+    def get_last_leaf(self):
+        return self.children[-1].get_last_leaf()
+
+    def __repr__(self):
+        code = self.get_code().replace('\n', ' ').replace('\r', ' ').strip()
+        return "<%s: %s@%s,%s>" % \
+            (type(self).__name__, code, self.start_pos[0], self.start_pos[1])
+
+
+class Node(BaseNode):
+    """Concrete implementation for interior nodes."""
+    __slots__ = ('type',)
+
+    def __init__(self, type, children):
+        super().__init__(children)
+        self.type = type
+
+    def __repr__(self):
+        return "%s(%s, %r)" % (self.__class__.__name__, self.type, self.children)
+
+
+class ErrorNode(BaseNode):
+    """
+    A node that contains valid nodes/leaves that were followed by a token that
+    was invalid. This basically means that the leaf after this node is where
+    Python would mark a syntax error.
+    """
+    __slots__ = ()
+    type = 'error_node'
+
+
+class ErrorLeaf(Leaf):
+    """
+    A leaf that is either completely invalid in a language (like `$` in Python)
+    or is invalid at that position. Like the star in `1 +* 1`.
+    """
+    __slots__ = ('token_type',)
+    type = 'error_leaf'
+
+    def __init__(self, token_type, value, start_pos, prefix=''):
+        super().__init__(value, start_pos, prefix)
+        self.token_type = token_type
+
+    def __repr__(self):
+        return "<%s: %s:%s, %s>" % \
+            (type(self).__name__, self.token_type, repr(self.value), self.start_pos)
diff --git a/myenv/lib/python3.10/site-packages/parso/utils.py b/myenv/lib/python3.10/site-packages/parso/utils.py
new file mode 100644
index 000000000..6ad1ea26a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/parso/utils.py
@@ -0,0 +1,194 @@
+import re
+import sys
+from ast import literal_eval
+from functools import total_ordering
+from typing import NamedTuple, Sequence, Union
+
+# The following is a list of characters that str.splitlines treats as line
+# breaks, but that are not line breaks in Python. In Python only \r (Carriage
+# Return, 0xD) and \n (Line Feed, 0xA) are allowed to split lines.
+_NON_LINE_BREAKS = (
+    '\v',  # Vertical Tabulation 0xB
+    '\f',  # Form Feed 0xC
+    '\x1C',  # File Separator
+    '\x1D',  # Group Separator
+    '\x1E',  # Record Separator
+    '\x85',  # Next Line (NEL - Equivalent to CR+LF.
+             # Used to mark end-of-line on some IBM mainframes.)
+    '\u2028',  # Line Separator
+    '\u2029',  # Paragraph Separator
+)
+
+
+class Version(NamedTuple):
+    major: int
+    minor: int
+    micro: int
+
+
+def split_lines(string: str, keepends: bool = False) -> Sequence[str]:
+    r"""
+    Intended for Python code. In contrast to Python's :py:meth:`str.splitlines`,
+    looks at form feeds and other special characters as normal text. Just
+    splits ``\n`` and ``\r\n``.
+    Also different: Returns ``[""]`` for an empty string input.
+
+    In Python 2.7 form feeds were treated as normal characters by
+    str.splitlines. In Python 3, however, str.splitlines splits on form feeds
+    as well, which is why they have to be merged back here.
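+
+    For example::
+
+        >>> split_lines('foo\nbar\n')
+        ['foo', 'bar', '']
+        >>> split_lines('')
+        ['']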
+ """ + if keepends: + lst = string.splitlines(True) + + # We have to merge lines that were broken by form feed characters. + merge = [] + for i, line in enumerate(lst): + try: + last_chr = line[-1] + except IndexError: + pass + else: + if last_chr in _NON_LINE_BREAKS: + merge.append(i) + + for index in reversed(merge): + try: + lst[index] = lst[index] + lst[index + 1] + del lst[index + 1] + except IndexError: + # index + 1 can be empty and therefore there's no need to + # merge. + pass + + # The stdlib's implementation of the end is inconsistent when calling + # it with/without keepends. One time there's an empty string in the + # end, one time there's none. + if string.endswith('\n') or string.endswith('\r') or string == '': + lst.append('') + return lst + else: + return re.split(r'\n|\r\n|\r', string) + + +def python_bytes_to_unicode( + source: Union[str, bytes], encoding: str = 'utf-8', errors: str = 'strict' +) -> str: + """ + Checks for unicode BOMs and PEP 263 encoding declarations. Then returns a + unicode object like in :py:meth:`bytes.decode`. + + :param encoding: See :py:meth:`bytes.decode` documentation. + :param errors: See :py:meth:`bytes.decode` documentation. ``errors`` can be + ``'strict'``, ``'replace'`` or ``'ignore'``. + """ + def detect_encoding(): + """ + For the implementation of encoding definitions in Python, look at: + - http://www.python.org/dev/peps/pep-0263/ + - http://docs.python.org/2/reference/lexical_analysis.html#encoding-declarations + """ + byte_mark = literal_eval(r"b'\xef\xbb\xbf'") + if source.startswith(byte_mark): + # UTF-8 byte-order mark + return 'utf-8' + + first_two_lines = re.match(br'(?:[^\r\n]*(?:\r\n|\r|\n)){0,2}', source).group(0) + possible_encoding = re.search(br"coding[=:]\s*([-\w.]+)", + first_two_lines) + if possible_encoding: + e = possible_encoding.group(1) + if not isinstance(e, str): + e = str(e, 'ascii', 'replace') + return e + else: + # the default if nothing else has been set -> PEP 263 + return encoding + + if isinstance(source, str): + # only cast str/bytes + return source + + encoding = detect_encoding() + try: + # Cast to unicode + return str(source, encoding, errors) + except LookupError: + if errors == 'replace': + # This is a weird case that can happen if the given encoding is not + # a valid encoding. This usually shouldn't happen with provided + # encodings, but can happen if somebody uses encoding declarations + # like `# coding: foo-8`. + return str(source, 'utf-8', errors) + raise + + +def version_info() -> Version: + """ + Returns a namedtuple of parso's version, similar to Python's + ``sys.version_info``. 
+ """ + from parso import __version__ + tupl = re.findall(r'[a-z]+|\d+', __version__) + return Version(*[x if i == 3 else int(x) for i, x in enumerate(tupl)]) + + +class _PythonVersionInfo(NamedTuple): + major: int + minor: int + + +@total_ordering +class PythonVersionInfo(_PythonVersionInfo): + def __gt__(self, other): + if isinstance(other, tuple): + if len(other) != 2: + raise ValueError("Can only compare to tuples of length 2.") + return (self.major, self.minor) > other + super().__gt__(other) + + return (self.major, self.minor) + + def __eq__(self, other): + if isinstance(other, tuple): + if len(other) != 2: + raise ValueError("Can only compare to tuples of length 2.") + return (self.major, self.minor) == other + super().__eq__(other) + + def __ne__(self, other): + return not self.__eq__(other) + + +def _parse_version(version) -> PythonVersionInfo: + match = re.match(r'(\d+)(?:\.(\d{1,2})(?:\.\d+)?)?((a|b|rc)\d)?$', version) + if match is None: + raise ValueError('The given version is not in the right format. ' + 'Use something like "3.8" or "3".') + + major = int(match.group(1)) + minor = match.group(2) + if minor is None: + # Use the latest Python in case it's not exactly defined, because the + # grammars are typically backwards compatible? + if major == 2: + minor = "7" + elif major == 3: + minor = "6" + else: + raise NotImplementedError("Sorry, no support yet for those fancy new/old versions.") + minor = int(minor) + return PythonVersionInfo(major, minor) + + +def parse_version_string(version: str = None) -> PythonVersionInfo: + """ + Checks for a valid version number (e.g. `3.8` or `3.10.1` or `3`) and + returns a corresponding version info that is always two characters long in + decimal. + """ + if version is None: + version = '%s.%s' % sys.version_info[:2] + if not isinstance(version, str): + raise TypeError('version must be a string like "3.8"') + + return _parse_version(version) diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/INSTALLER b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/LICENSE b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/LICENSE new file mode 100644 index 000000000..754db5afc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +ISC LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2013-2014, Pexpect development team + Copyright (c) 2012, Noah Spurrier + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/METADATA b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/METADATA
new file mode 100644
index 000000000..4c417227d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: pexpect
+Version: 4.8.0
+Summary: Pexpect allows easy control of interactive console applications.
+Home-page: https://pexpect.readthedocs.io/
+Author: Noah Spurrier; Thomas Kluyver; Jeff Quast
+Author-email: noah@noah.org, thomas@kluyver.me.uk, contact@jeffquast.com
+License: ISC license
+Platform: UNIX
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: ISC License (ISCL)
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: System
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Installation/Setup
+Classifier: Topic :: System :: Shells
+Classifier: Topic :: System :: Software Distribution
+Classifier: Topic :: Terminals
+Requires-Dist: ptyprocess (>=0.5)
+
+
+Pexpect is a pure Python module for spawning child applications; controlling
+them; and responding to expected patterns in their output. Pexpect works like
+Don Libes' Expect. Pexpect allows your script to spawn a child application and
+control it as if a human were typing commands.
+
+Pexpect can be used for automating interactive applications such as ssh, ftp,
+passwd, telnet, etc. It can be used to automate setup scripts for duplicating
+software package installations on different servers. It can be used for
+automated software testing. Pexpect is in the spirit of Don Libes' Expect, but
+Pexpect is pure Python.
+
+The main features of Pexpect require the pty module in the Python standard
+library, which is only available on Unix-like systems. Some features—waiting
+for patterns from file descriptors or subprocesses—are also available on
+Windows.
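+
+A minimal example of the spawn interface (illustrative only; the host name
+and the expected prompts are placeholders)::
+
+    import pexpect
+    child = pexpect.spawn('ftp ftp.openbsd.org')
+    child.expect('Name .*: ')
+    child.sendline('anonymous')
+    child.expect('ftp> ')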
+ + diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/RECORD b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/RECORD new file mode 100644 index 000000000..ddbf1b3fb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/RECORD @@ -0,0 +1,38 @@ +pexpect-4.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pexpect-4.8.0.dist-info/LICENSE,sha256=Skg64cTcc4psi3P-tJB04YNdoCq1qmhvJnUCmQb6Nk0,987 +pexpect-4.8.0.dist-info/METADATA,sha256=gD4QNR0Xubt0nLzHH_YhdcXrQS5MIHGuxE-kJs0RNJo,2180 +pexpect-4.8.0.dist-info/RECORD,, +pexpect-4.8.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pexpect-4.8.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +pexpect-4.8.0.dist-info/top_level.txt,sha256=O-b3UY9VQZkW3yDAeFNatUOKO4GojVWO4TTHoI9-E7k,8 +pexpect/ANSI.py,sha256=aA-3tdXz_FZ4G7PAqFZi5g1KBGQ6PzJzS0gm3ALZKZw,12177 +pexpect/FSM.py,sha256=tluiyUGMyIH3q_wLG6Ak1NZVuXUAGNDjq6k6BK1q8RY,13419 +pexpect/__init__.py,sha256=xF4qylJdK-FRy40tmhAXu99fV9ewFasxTH3DSgoZjzQ,3902 +pexpect/__pycache__/ANSI.cpython-310.pyc,, +pexpect/__pycache__/FSM.cpython-310.pyc,, +pexpect/__pycache__/__init__.cpython-310.pyc,, +pexpect/__pycache__/_async.cpython-310.pyc,, +pexpect/__pycache__/exceptions.cpython-310.pyc,, +pexpect/__pycache__/expect.cpython-310.pyc,, +pexpect/__pycache__/fdpexpect.cpython-310.pyc,, +pexpect/__pycache__/popen_spawn.cpython-310.pyc,, +pexpect/__pycache__/pty_spawn.cpython-310.pyc,, +pexpect/__pycache__/pxssh.cpython-310.pyc,, +pexpect/__pycache__/replwrap.cpython-310.pyc,, +pexpect/__pycache__/run.cpython-310.pyc,, +pexpect/__pycache__/screen.cpython-310.pyc,, +pexpect/__pycache__/spawnbase.cpython-310.pyc,, +pexpect/__pycache__/utils.cpython-310.pyc,, +pexpect/_async.py,sha256=UCUC9kbBZGjzG12YcR_M5yBjB4Dwc8nJOYNPklL-OdU,3304 +pexpect/bashrc.sh,sha256=CHK8qDg_HtDVdfyDULOV8MZDRDr4pOaIbo31XV58nQs,380 +pexpect/exceptions.py,sha256=A9C1PWbBc2j9AKvnv7UkPCawhFTEGYmeULW0vwbMvXQ,1068 +pexpect/expect.py,sha256=KKtBmx2MYa-yDE715XlHUcloKe5ndBD359a4OYVXD84,13827 +pexpect/fdpexpect.py,sha256=ugTrwveFi-zfl_nOPjbRyLUER1Wmhu8YxczCWtZgZWc,5828 +pexpect/popen_spawn.py,sha256=hVHOqr22jD2Pr-yVgsfwgqGAtULLi6kJLKQRrTBPvEg,6161 +pexpect/pty_spawn.py,sha256=ZygSYsdnVJ5acxiNM9gLvLrT2AVqgwJvbDcPaTxxv9E,37382 +pexpect/pxssh.py,sha256=bZHwFDOn1gC8U_Sl07eFFRlYfCjGCwEoC9WaZCHQo5Y,24279 +pexpect/replwrap.py,sha256=Raq9XgYfIlF-rH_CALgFbzK1H_A4o0NqmK9q45anmVA,5633 +pexpect/run.py,sha256=XK2GwW6_wbUZ6buIDbhouaOySVPnc5IahbgSjieks50,6628 +pexpect/screen.py,sha256=-twD4sIEp83nzuYH9lRDzwHfesoTgVGWglsBYWOK7Ks,13704 +pexpect/spawnbase.py,sha256=FoaNvkGYIXrD6xmBedrJmm_oVLMatqbGCXc027CugkQ,21247 +pexpect/utils.py,sha256=1jIhzU7eBvY3pbW3LZoJhCOU2KWqgty5HgQ6VBYIp5U,6019 diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/REQUESTED b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/REQUESTED new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/WHEEL b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/WHEEL new file mode 100644 index 000000000..8b701e93c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/top_level.txt 
b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/top_level.txt new file mode 100644 index 000000000..808fb07af --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect-4.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +pexpect diff --git a/myenv/lib/python3.10/site-packages/pexpect/ANSI.py b/myenv/lib/python3.10/site-packages/pexpect/ANSI.py new file mode 100644 index 000000000..1cd2e90e7 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/ANSI.py @@ -0,0 +1,351 @@ +'''This implements an ANSI (VT100) terminal emulator as a subclass of screen. + +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +''' + +# references: +# http://en.wikipedia.org/wiki/ANSI_escape_code +# http://www.retards.org/terminals/vt102.html +# http://vt100.net/docs/vt102-ug/contents.html +# http://vt100.net/docs/vt220-rm/ +# http://www.termsys.demon.co.uk/vtansi.htm + +from . import screen +from . import FSM +import string + +# +# The 'Do.*' functions are helper functions for the ANSI class. 
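+# Each helper receives the FSM instance: fsm.memory is a list used as a
+# stack whose first element is the screen object; numeric arguments parsed
+# from an escape sequence are pushed on top of it.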
+# +def DoEmit (fsm): + + screen = fsm.memory[0] + screen.write_ch(fsm.input_symbol) + +def DoStartNumber (fsm): + + fsm.memory.append (fsm.input_symbol) + +def DoBuildNumber (fsm): + + ns = fsm.memory.pop() + ns = ns + fsm.input_symbol + fsm.memory.append (ns) + +def DoBackOne (fsm): + + screen = fsm.memory[0] + screen.cursor_back () + +def DoBack (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_back (count) + +def DoDownOne (fsm): + + screen = fsm.memory[0] + screen.cursor_down () + +def DoDown (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_down (count) + +def DoForwardOne (fsm): + + screen = fsm.memory[0] + screen.cursor_forward () + +def DoForward (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_forward (count) + +def DoUpReverse (fsm): + + screen = fsm.memory[0] + screen.cursor_up_reverse() + +def DoUpOne (fsm): + + screen = fsm.memory[0] + screen.cursor_up () + +def DoUp (fsm): + + count = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_up (count) + +def DoHome (fsm): + + c = int(fsm.memory.pop()) + r = int(fsm.memory.pop()) + screen = fsm.memory[0] + screen.cursor_home (r,c) + +def DoHomeOrigin (fsm): + + c = 1 + r = 1 + screen = fsm.memory[0] + screen.cursor_home (r,c) + +def DoEraseDown (fsm): + + screen = fsm.memory[0] + screen.erase_down() + +def DoErase (fsm): + + arg = int(fsm.memory.pop()) + screen = fsm.memory[0] + if arg == 0: + screen.erase_down() + elif arg == 1: + screen.erase_up() + elif arg == 2: + screen.erase_screen() + +def DoEraseEndOfLine (fsm): + + screen = fsm.memory[0] + screen.erase_end_of_line() + +def DoEraseLine (fsm): + + arg = int(fsm.memory.pop()) + screen = fsm.memory[0] + if arg == 0: + screen.erase_end_of_line() + elif arg == 1: + screen.erase_start_of_line() + elif arg == 2: + screen.erase_line() + +def DoEnableScroll (fsm): + + screen = fsm.memory[0] + screen.scroll_screen() + +def DoCursorSave (fsm): + + screen = fsm.memory[0] + screen.cursor_save_attrs() + +def DoCursorRestore (fsm): + + screen = fsm.memory[0] + screen.cursor_restore_attrs() + +def DoScrollRegion (fsm): + + screen = fsm.memory[0] + r2 = int(fsm.memory.pop()) + r1 = int(fsm.memory.pop()) + screen.scroll_screen_rows (r1,r2) + +def DoMode (fsm): + + screen = fsm.memory[0] + mode = fsm.memory.pop() # Should be 4 + # screen.setReplaceMode () + +def DoLog (fsm): + + screen = fsm.memory[0] + fsm.memory = [screen] + fout = open ('log', 'a') + fout.write (fsm.input_symbol + ',' + fsm.current_state + '\n') + fout.close() + +class term (screen.screen): + + '''This class is an abstract, generic terminal. + This does nothing. This is a placeholder that + provides a common base class for other terminals + such as an ANSI terminal. ''' + + def __init__ (self, r=24, c=80, *args, **kwargs): + + screen.screen.__init__(self, r,c,*args,**kwargs) + +class ANSI (term): + '''This class implements an ANSI (VT100) terminal. + It is a stream filter that recognizes ANSI terminal + escape sequences and maintains the state of a screen object. 
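+
+    For example, writing ``'abc\x1b[2Dxy'`` emits ``a``, ``b`` and ``c``,
+    moves the cursor back two columns, then overwrites with ``x`` and ``y``.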
''' + + def __init__ (self, r=24,c=80,*args,**kwargs): + + term.__init__(self,r,c,*args,**kwargs) + + #self.screen = screen (24,80) + self.state = FSM.FSM ('INIT',[self]) + self.state.set_default_transition (DoLog, 'INIT') + self.state.add_transition_any ('INIT', DoEmit, 'INIT') + self.state.add_transition ('\x1b', 'INIT', None, 'ESC') + self.state.add_transition_any ('ESC', DoLog, 'INIT') + self.state.add_transition ('(', 'ESC', None, 'G0SCS') + self.state.add_transition (')', 'ESC', None, 'G1SCS') + self.state.add_transition_list ('AB012', 'G0SCS', None, 'INIT') + self.state.add_transition_list ('AB012', 'G1SCS', None, 'INIT') + self.state.add_transition ('7', 'ESC', DoCursorSave, 'INIT') + self.state.add_transition ('8', 'ESC', DoCursorRestore, 'INIT') + self.state.add_transition ('M', 'ESC', DoUpReverse, 'INIT') + self.state.add_transition ('>', 'ESC', DoUpReverse, 'INIT') + self.state.add_transition ('<', 'ESC', DoUpReverse, 'INIT') + self.state.add_transition ('=', 'ESC', None, 'INIT') # Selects application keypad. + self.state.add_transition ('#', 'ESC', None, 'GRAPHICS_POUND') + self.state.add_transition_any ('GRAPHICS_POUND', None, 'INIT') + self.state.add_transition ('[', 'ESC', None, 'ELB') + # ELB means Escape Left Bracket. That is ^[[ + self.state.add_transition ('H', 'ELB', DoHomeOrigin, 'INIT') + self.state.add_transition ('D', 'ELB', DoBackOne, 'INIT') + self.state.add_transition ('B', 'ELB', DoDownOne, 'INIT') + self.state.add_transition ('C', 'ELB', DoForwardOne, 'INIT') + self.state.add_transition ('A', 'ELB', DoUpOne, 'INIT') + self.state.add_transition ('J', 'ELB', DoEraseDown, 'INIT') + self.state.add_transition ('K', 'ELB', DoEraseEndOfLine, 'INIT') + self.state.add_transition ('r', 'ELB', DoEnableScroll, 'INIT') + self.state.add_transition ('m', 'ELB', self.do_sgr, 'INIT') + self.state.add_transition ('?', 'ELB', None, 'MODECRAP') + self.state.add_transition_list (string.digits, 'ELB', DoStartNumber, 'NUMBER_1') + self.state.add_transition_list (string.digits, 'NUMBER_1', DoBuildNumber, 'NUMBER_1') + self.state.add_transition ('D', 'NUMBER_1', DoBack, 'INIT') + self.state.add_transition ('B', 'NUMBER_1', DoDown, 'INIT') + self.state.add_transition ('C', 'NUMBER_1', DoForward, 'INIT') + self.state.add_transition ('A', 'NUMBER_1', DoUp, 'INIT') + self.state.add_transition ('J', 'NUMBER_1', DoErase, 'INIT') + self.state.add_transition ('K', 'NUMBER_1', DoEraseLine, 'INIT') + self.state.add_transition ('l', 'NUMBER_1', DoMode, 'INIT') + ### It gets worse... the 'm' code can have infinite number of + ### number;number;number before it. I've never seen more than two, + ### but the specs say it's allowed. crap! + self.state.add_transition ('m', 'NUMBER_1', self.do_sgr, 'INIT') + ### LED control. Same implementation problem as 'm' code. + self.state.add_transition ('q', 'NUMBER_1', self.do_decsca, 'INIT') + + # \E[?47h switch to alternate screen + # \E[?47l restores to normal screen from alternate screen. 
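+        # The MODECRAP states below consume these private-mode sequences;
+        # do_modecrap only discards the parsed number.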
+        self.state.add_transition_list (string.digits, 'MODECRAP', DoStartNumber, 'MODECRAP_NUM')
+        self.state.add_transition_list (string.digits, 'MODECRAP_NUM', DoBuildNumber, 'MODECRAP_NUM')
+        self.state.add_transition ('l', 'MODECRAP_NUM', self.do_modecrap, 'INIT')
+        self.state.add_transition ('h', 'MODECRAP_NUM', self.do_modecrap, 'INIT')
+
+#RM   Reset Mode   Esc [ Ps l   none
+        self.state.add_transition (';', 'NUMBER_1', None, 'SEMICOLON')
+        self.state.add_transition_any ('SEMICOLON', DoLog, 'INIT')
+        self.state.add_transition_list (string.digits, 'SEMICOLON', DoStartNumber, 'NUMBER_2')
+        self.state.add_transition_list (string.digits, 'NUMBER_2', DoBuildNumber, 'NUMBER_2')
+        self.state.add_transition_any ('NUMBER_2', DoLog, 'INIT')
+        self.state.add_transition ('H', 'NUMBER_2', DoHome, 'INIT')
+        self.state.add_transition ('f', 'NUMBER_2', DoHome, 'INIT')
+        self.state.add_transition ('r', 'NUMBER_2', DoScrollRegion, 'INIT')
+        ### It gets worse... the 'm' code can have infinite number of
+        ### number;number;number before it. I've never seen more than two,
+        ### but the specs say it's allowed. crap!
+        self.state.add_transition ('m', 'NUMBER_2', self.do_sgr, 'INIT')
+        ### LED control. Same problem as 'm' code.
+        self.state.add_transition ('q', 'NUMBER_2', self.do_decsca, 'INIT')
+        self.state.add_transition (';', 'NUMBER_2', None, 'SEMICOLON_X')
+
+        # Create a state for 'q' and 'm' which allows an infinite number of ignored numbers
+        self.state.add_transition_any ('SEMICOLON_X', DoLog, 'INIT')
+        self.state.add_transition_list (string.digits, 'SEMICOLON_X', DoStartNumber, 'NUMBER_X')
+        self.state.add_transition_list (string.digits, 'NUMBER_X', DoBuildNumber, 'NUMBER_X')
+        self.state.add_transition_any ('NUMBER_X', DoLog, 'INIT')
+        self.state.add_transition ('m', 'NUMBER_X', self.do_sgr, 'INIT')
+        self.state.add_transition ('q', 'NUMBER_X', self.do_decsca, 'INIT')
+        self.state.add_transition (';', 'NUMBER_X', None, 'SEMICOLON_X')
+
+    def process (self, c):
+        """Process a single character. Called by :meth:`write`."""
+        if isinstance(c, bytes):
+            c = self._decode(c)
+        self.state.process(c)
+
+    def process_list (self, l):
+
+        self.write(l)
+
+    def write (self, s):
+        """Process text, writing it to the virtual screen while handling
+        ANSI escape codes.
+        """
+        if isinstance(s, bytes):
+            s = self._decode(s)
+        for c in s:
+            self.process(c)
+
+    def flush (self):
+        pass
+
+    def write_ch (self, ch):
+        '''This puts a character at the current cursor position. The cursor
+        position is moved forward with wrap-around, but no scrolling is done if
+        the cursor hits the lower-right corner of the screen. '''
+
+        if isinstance(ch, bytes):
+            ch = self._decode(ch)
+
+        # '\r' produces a call to cr(); '\n' produces a call to crlf().
+        ch = ch[0]
+
+        if ch == u'\r':
+            self.cr()
+            return
+        if ch == u'\n':
+            self.crlf()
+            return
+        if ch == chr(screen.BS):
+            self.cursor_back()
+            return
+        self.put_abs(self.cur_r, self.cur_c, ch)
+        old_r = self.cur_r
+        old_c = self.cur_c
+        self.cursor_forward()
+        if old_c == self.cur_c:
+            self.cursor_down()
+            if old_r != self.cur_r:
+                self.cursor_home (self.cur_r, 1)
+            else:
+                self.scroll_up ()
+                self.cursor_home (self.cur_r, 1)
+                self.erase_line()
+
+    def do_sgr (self, fsm):
+        '''Select Graphic Rendition, e.g. color. '''
+        screen = fsm.memory[0]
+        fsm.memory = [screen]
+
+    def do_decsca (self, fsm):
+        '''Select character protection attribute. '''
+        screen = fsm.memory[0]
+        fsm.memory = [screen]
+
+    def do_modecrap (self, fsm):
+        '''Handler for \x1b[?<number>h and \x1b[?<number>l.
If anyone + wanted to actually use these, they'd need to add more states to the + FSM rather than just improve or override this method. ''' + screen = fsm.memory[0] + fsm.memory = [screen] diff --git a/myenv/lib/python3.10/site-packages/pexpect/FSM.py b/myenv/lib/python3.10/site-packages/pexpect/FSM.py new file mode 100644 index 000000000..46b392ea0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/FSM.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python + +'''This module implements a Finite State Machine (FSM). In addition to state +this FSM also maintains a user defined "memory". So this FSM can be used as a +Push-down Automata (PDA) since a PDA is a FSM + memory. + +The following describes how the FSM works, but you will probably also need to +see the example function to understand how the FSM is used in practice. + +You define an FSM by building tables of transitions. For a given input symbol +the process() method uses these tables to decide what action to call and what +the next state will be. The FSM has a table of transitions that associate: + + (input_symbol, current_state) --> (action, next_state) + +Where "action" is a function you define. The symbols and states can be any +objects. You use the add_transition() and add_transition_list() methods to add +to the transition table. The FSM also has a table of transitions that +associate: + + (current_state) --> (action, next_state) + +You use the add_transition_any() method to add to this transition table. The +FSM also has one default transition that is not associated with any specific +input_symbol or state. You use the set_default_transition() method to set the +default transition. + +When an action function is called it is passed a reference to the FSM. The +action function may then access attributes of the FSM such as input_symbol, +current_state, or "memory". The "memory" attribute can be any object that you +want to pass along to the action functions. It is not used by the FSM itself. +For parsing you would typically pass a list to be used as a stack. + +The processing sequence is as follows. The process() method is given an +input_symbol to process. The FSM will search the table of transitions that +associate: + + (input_symbol, current_state) --> (action, next_state) + +If the pair (input_symbol, current_state) is found then process() will call the +associated action function and then set the current state to the next_state. + +If the FSM cannot find a match for (input_symbol, current_state) it will then +search the table of transitions that associate: + + (current_state) --> (action, next_state) + +If the current_state is found then the process() method will call the +associated action function and then set the current state to the next_state. +Notice that this table lacks an input_symbol. It lets you define transitions +for a current_state and ANY input_symbol. Hence, it is called the "any" table. +Remember, it is always checked after first searching the table for a specific +(input_symbol, current_state). + +For the case where the FSM did not match either of the previous two cases the +FSM will try to use the default transition. If the default transition is +defined then the process() method will call the associated action function and +then set the current state to the next_state. This lets you define a default +transition as a catch-all case. You can think of it as an exception handler. +There can be only one default transition. 
+ +Finally, if none of the previous cases are defined for an input_symbol and +current_state then the FSM will raise an exception. This may be desirable, but +you can always prevent this just by defining a default transition. + +Noah Spurrier 20020822 + +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +''' + +class ExceptionFSM(Exception): + + '''This is the FSM Exception class.''' + + def __init__(self, value): + self.value = value + + def __str__(self): + return 'ExceptionFSM: ' + str(self.value) + +class FSM: + + '''This is a Finite State Machine (FSM). + ''' + + def __init__(self, initial_state, memory=None): + + '''This creates the FSM. You set the initial state here. The "memory" + attribute is any object that you want to pass along to the action + functions. It is not used by the FSM. For parsing you would typically + pass a list to be used as a stack. ''' + + # Map (input_symbol, current_state) --> (action, next_state). + self.state_transitions = {} + # Map (current_state) --> (action, next_state). + self.state_transitions_any = {} + self.default_transition = None + + self.input_symbol = None + self.initial_state = initial_state + self.current_state = self.initial_state + self.next_state = None + self.action = None + self.memory = memory + + def reset (self): + + '''This sets the current_state to the initial_state and sets + input_symbol to None. The initial state was set by the constructor + __init__(). ''' + + self.current_state = self.initial_state + self.input_symbol = None + + def add_transition (self, input_symbol, state, action=None, next_state=None): + + '''This adds a transition that associates: + + (input_symbol, current_state) --> (action, next_state) + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. + + You can also set transitions for a list of symbols by using + add_transition_list(). ''' + + if next_state is None: + next_state = state + self.state_transitions[(input_symbol, state)] = (action, next_state) + + def add_transition_list (self, list_input_symbols, state, action=None, next_state=None): + + '''This adds the same transition for a list of input symbols. + You can pass a list or a string. Note that it is handy to use + string.digits, string.whitespace, string.letters, etc. to add + transitions that match character classes. + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. 
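+
+        For example, ``fsm.add_transition_list (string.digits, 'INIT',
+        BeginBuildNumber, 'BUILDING_NUMBER')`` registers one transition per
+        digit, as in the demo at the bottom of this module.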
''' + + if next_state is None: + next_state = state + for input_symbol in list_input_symbols: + self.add_transition (input_symbol, state, action, next_state) + + def add_transition_any (self, state, action=None, next_state=None): + + '''This adds a transition that associates: + + (current_state) --> (action, next_state) + + That is, any input symbol will match the current state. + The process() method checks the "any" state associations after it first + checks for an exact match of (input_symbol, current_state). + + The action may be set to None in which case the process() method will + ignore the action and only set the next_state. The next_state may be + set to None in which case the current state will be unchanged. ''' + + if next_state is None: + next_state = state + self.state_transitions_any [state] = (action, next_state) + + def set_default_transition (self, action, next_state): + + '''This sets the default transition. This defines an action and + next_state if the FSM cannot find the input symbol and the current + state in the transition list and if the FSM cannot find the + current_state in the transition_any list. This is useful as a final + fall-through state for catching errors and undefined states. + + The default transition can be removed by setting the attribute + default_transition to None. ''' + + self.default_transition = (action, next_state) + + def get_transition (self, input_symbol, state): + + '''This returns (action, next state) given an input_symbol and state. + This does not modify the FSM state, so calling this method has no side + effects. Normally you do not call this method directly. It is called by + process(). + + The sequence of steps to check for a defined transition goes from the + most specific to the least specific. + + 1. Check state_transitions[] that match exactly the tuple, + (input_symbol, state) + + 2. Check state_transitions_any[] that match (state) + In other words, match a specific state and ANY input_symbol. + + 3. Check if the default_transition is defined. + This catches any input_symbol and any state. + This is a handler for errors, undefined states, or defaults. + + 4. No transition was defined. If we get here then raise an exception. + ''' + + if (input_symbol, state) in self.state_transitions: + return self.state_transitions[(input_symbol, state)] + elif state in self.state_transitions_any: + return self.state_transitions_any[state] + elif self.default_transition is not None: + return self.default_transition + else: + raise ExceptionFSM ('Transition is undefined: (%s, %s).' % + (str(input_symbol), str(state)) ) + + def process (self, input_symbol): + + '''This is the main method that you call to process input. This may + cause the FSM to change state and call an action. This method calls + get_transition() to find the action and next_state associated with the + input_symbol and current_state. If the action is None then the action + is not called and only the current state is changed. This method + processes one complete input symbol. You can process a list of symbols + (or a string) by calling process_list(). ''' + + self.input_symbol = input_symbol + (self.action, self.next_state) = self.get_transition (self.input_symbol, self.current_state) + if self.action is not None: + self.action (self) + self.current_state = self.next_state + self.next_state = None + + def process_list (self, input_symbols): + + '''This takes a list and sends each element to process(). The list may + be a string or any iterable object. 
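+
+        For example, ``f.process_list('167 3 2 2 * * * 1 - =')`` feeds the
+        RPN expression below to the machine one character at a time.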
'''
+
+        for s in input_symbols:
+            self.process (s)
+
+##############################################################################
+# The following is an example that demonstrates the use of the FSM class to
+# process an RPN expression. Run this module from the command line. You will
+# get a prompt > for input. Enter an RPN Expression. Numbers may be integers.
+# Operators are * / + - Use the = sign to evaluate and print the expression.
+# For example:
+#
+#    167 3 2 2 * * * 1 - =
+#
+# will print:
+#
+#    2003
+##############################################################################
+
+import sys
+import string
+
+PY3 = (sys.version_info[0] >= 3)
+
+#
+# These define the actions.
+# Note that "memory" is a list being used as a stack.
+#
+
+def BeginBuildNumber (fsm):
+    fsm.memory.append (fsm.input_symbol)
+
+def BuildNumber (fsm):
+    s = fsm.memory.pop ()
+    s = s + fsm.input_symbol
+    fsm.memory.append (s)
+
+def EndBuildNumber (fsm):
+    s = fsm.memory.pop ()
+    fsm.memory.append (int(s))
+
+def DoOperator (fsm):
+    ar = fsm.memory.pop()
+    al = fsm.memory.pop()
+    if fsm.input_symbol == '+':
+        fsm.memory.append (al + ar)
+    elif fsm.input_symbol == '-':
+        fsm.memory.append (al - ar)
+    elif fsm.input_symbol == '*':
+        fsm.memory.append (al * ar)
+    elif fsm.input_symbol == '/':
+        fsm.memory.append (al / ar)
+
+def DoEqual (fsm):
+    print(str(fsm.memory.pop()))
+
+def Error (fsm):
+    print('That does not compute.')
+    print(str(fsm.input_symbol))
+
+def main():
+
+    '''This is where the example starts and the FSM state transitions are
+    defined. Note that states are strings (such as 'INIT'). This is not
+    necessary, but it makes the example easier to read. '''
+
+    f = FSM ('INIT', [])
+    f.set_default_transition (Error, 'INIT')
+    f.add_transition_any ('INIT', None, 'INIT')
+    f.add_transition ('=', 'INIT', DoEqual, 'INIT')
+    f.add_transition_list (string.digits, 'INIT', BeginBuildNumber, 'BUILDING_NUMBER')
+    f.add_transition_list (string.digits, 'BUILDING_NUMBER', BuildNumber, 'BUILDING_NUMBER')
+    f.add_transition_list (string.whitespace, 'BUILDING_NUMBER', EndBuildNumber, 'INIT')
+    f.add_transition_list ('+-*/', 'INIT', DoOperator, 'INIT')
+
+    print()
+    print('Enter an RPN Expression.')
+    print('Numbers may be integers. Operators are * / + -')
+    print('Use the = sign to evaluate and print the expression.')
+    print('For example: ')
+    print('    167 3 2 2 * * * 1 - =')
+    inputstr = (input if PY3 else raw_input)('> ')  # analysis:ignore
+    f.process_list(inputstr)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/myenv/lib/python3.10/site-packages/pexpect/__init__.py b/myenv/lib/python3.10/site-packages/pexpect/__init__.py
new file mode 100644
index 000000000..7e3045378
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pexpect/__init__.py
@@ -0,0 +1,85 @@
+'''Pexpect is a Python module for spawning child applications and controlling
+them automatically. Pexpect can be used for automating interactive applications
+such as ssh, ftp, passwd, telnet, etc. It can be used to automate setup
+scripts for duplicating software package installations on different servers. It
+can be used for automated software testing. Pexpect is in the spirit of Don
+Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python
+require TCL and Expect or require C extensions to be compiled. Pexpect does not
+use C, Expect, or TCL extensions. It should work on any platform that supports
+the standard Python pty module. The Pexpect interface focuses on ease of use so
+that simple tasks are easy.
+
+There are two main interfaces to the Pexpect system; these are the function,
+run() and the class, spawn. The spawn class is more powerful. The run()
+function is simpler than spawn, and is good for quickly calling a program. When
+you call the run() function it executes a given program and then returns the
+output. This is a handy replacement for os.system().
+
+For example::
+
+    pexpect.run('ls -la')
+
+The spawn class is the more powerful interface to the Pexpect system. You can
+use this to spawn a child program then interact with it by sending input and
+expecting responses (waiting for patterns in the child's output).
+
+For example::
+
+    child = pexpect.spawn('scp foo user@example.com:.')
+    child.expect('Password:')
+    child.sendline(mypassword)
+
+This works even for commands that ask for passwords or other input outside of
+the normal stdio streams. For example, ssh reads input directly from the TTY
+device which bypasses stdin.
+
+Credits: Noah Spurrier, Richard Holden, Marco Molteni, Kimberley Burchett,
+Robert Stone, Hartmut Goebel, Chad Schroeder, Erick Tryzelaar, Dave Kirby, Ids
+vander Molen, George Todd, Noel Taylor, Nicolas D. Cesar, Alexander Gattin,
+Jacques-Etienne Baudoux, Geoffrey Marshall, Francisco Lourenco, Glen Mabey,
+Karthik Gurusamy, Fernando Perez, Corey Minyard, Jon Cohen, Guillaume
+Chazarain, Andrew Ryan, Nick Craig-Wood, Andrew Stone, Jorgen Grahn, John
+Spiegel, Jan Grant, and Shane Kerr. Let me know if I forgot anyone.
+
+Pexpect is free, open source, and all that good stuff.
+http://pexpect.sourceforge.net/
+
+PEXPECT LICENSE
+
+    This license is approved by the OSI and FSF as GPL-compatible.
+    http://opensource.org/licenses/isc-license.txt
+
+    Copyright (c) 2012, Noah Spurrier
+    PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
+    PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
+    COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ +''' + +import sys +PY3 = (sys.version_info[0] >= 3) + +from .exceptions import ExceptionPexpect, EOF, TIMEOUT +from .utils import split_command_line, which, is_executable_file +from .expect import Expecter, searcher_re, searcher_string + +if sys.platform != 'win32': + # On Unix, these are available at the top level for backwards compatibility + from .pty_spawn import spawn, spawnu + from .run import run, runu + +__version__ = '4.8.0' +__revision__ = '' +__all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'spawnu', 'run', 'runu', + 'which', 'split_command_line', '__version__', '__revision__'] + + + +# vim: set shiftround expandtab tabstop=4 shiftwidth=4 ft=python autoindent : diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/ANSI.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/ANSI.cpython-310.pyc new file mode 100644 index 000000000..e5ad686b0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/ANSI.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/FSM.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/FSM.cpython-310.pyc new file mode 100644 index 000000000..ee69274a8 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/FSM.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..93ab4e619 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/_async.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/_async.cpython-310.pyc new file mode 100644 index 000000000..f517d7f11 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/_async.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/exceptions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 000000000..044746745 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/exceptions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/expect.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/expect.cpython-310.pyc new file mode 100644 index 000000000..a8fd8b36d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/expect.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/fdpexpect.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/fdpexpect.cpython-310.pyc new file mode 100644 index 000000000..313f090d6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/fdpexpect.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/popen_spawn.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/popen_spawn.cpython-310.pyc new file mode 100644 index 000000000..0949d4b72 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/popen_spawn.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pty_spawn.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pty_spawn.cpython-310.pyc new file mode 100644 index 000000000..18a1f1077 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pty_spawn.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pxssh.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pxssh.cpython-310.pyc new file mode 100644 index 000000000..b6f9dda92 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/pxssh.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/replwrap.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/replwrap.cpython-310.pyc new file mode 100644 index 000000000..addb7fd4b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/replwrap.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/run.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/run.cpython-310.pyc new file mode 100644 index 000000000..132479b9b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/run.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/screen.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/screen.cpython-310.pyc new file mode 100644 index 000000000..cd1ade592 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/screen.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/spawnbase.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/spawnbase.cpython-310.pyc new file mode 100644 index 000000000..c81ad4eb2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/spawnbase.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/__pycache__/utils.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/utils.cpython-310.pyc new file mode 100644 index 000000000..1421d4a10 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pexpect/__pycache__/utils.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pexpect/_async.py b/myenv/lib/python3.10/site-packages/pexpect/_async.py new file mode 100644 index 000000000..dfbfeef5f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/_async.py @@ -0,0 +1,103 @@ +import asyncio +import errno +import signal + +from pexpect import EOF + +@asyncio.coroutine +def expect_async(expecter, timeout=None): + # First process data that was previously read - if it maches, we don't need + # async stuff. 
+ idx = expecter.existing_data() + if idx is not None: + return idx + if not expecter.spawn.async_pw_transport: + pw = PatternWaiter() + pw.set_expecter(expecter) + transport, pw = yield from asyncio.get_event_loop()\ + .connect_read_pipe(lambda: pw, expecter.spawn) + expecter.spawn.async_pw_transport = pw, transport + else: + pw, transport = expecter.spawn.async_pw_transport + pw.set_expecter(expecter) + transport.resume_reading() + try: + return (yield from asyncio.wait_for(pw.fut, timeout)) + except asyncio.TimeoutError as e: + transport.pause_reading() + return expecter.timeout(e) + +@asyncio.coroutine +def repl_run_command_async(repl, cmdlines, timeout=-1): + res = [] + repl.child.sendline(cmdlines[0]) + for line in cmdlines[1:]: + yield from repl._expect_prompt(timeout=timeout, async_=True) + res.append(repl.child.before) + repl.child.sendline(line) + + # Command was fully submitted, now wait for the next prompt + prompt_idx = yield from repl._expect_prompt(timeout=timeout, async_=True) + if prompt_idx == 1: + # We got the continuation prompt - command was incomplete + repl.child.kill(signal.SIGINT) + yield from repl._expect_prompt(timeout=1, async_=True) + raise ValueError("Continuation prompt found - input was incomplete:") + return u''.join(res + [repl.child.before]) + +class PatternWaiter(asyncio.Protocol): + transport = None + + def set_expecter(self, expecter): + self.expecter = expecter + self.fut = asyncio.Future() + + def found(self, result): + if not self.fut.done(): + self.fut.set_result(result) + self.transport.pause_reading() + + def error(self, exc): + if not self.fut.done(): + self.fut.set_exception(exc) + self.transport.pause_reading() + + def connection_made(self, transport): + self.transport = transport + + def data_received(self, data): + spawn = self.expecter.spawn + s = spawn._decoder.decode(data) + spawn._log(s, 'read') + + if self.fut.done(): + spawn._before.write(s) + spawn._buffer.write(s) + return + + try: + index = self.expecter.new_data(s) + if index is not None: + # Found a match + self.found(index) + except Exception as e: + self.expecter.errored() + self.error(e) + + def eof_received(self): + # N.B. If this gets called, async will close the pipe (the spawn object) + # for us + try: + self.expecter.spawn.flag_eof = True + index = self.expecter.eof() + except EOF as e: + self.error(e) + else: + self.found(index) + + def connection_lost(self, exc): + if isinstance(exc, OSError) and exc.errno == errno.EIO: + # We may get here without eof_received being called, e.g on Linux + self.eof_received() + elif exc is not None: + self.error(exc) diff --git a/myenv/lib/python3.10/site-packages/pexpect/bashrc.sh b/myenv/lib/python3.10/site-packages/pexpect/bashrc.sh new file mode 100644 index 000000000..c734ac90b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/bashrc.sh @@ -0,0 +1,16 @@ +# Different platforms have different names for the systemwide bashrc +if [[ -f /etc/bashrc ]]; then + source /etc/bashrc +fi +if [[ -f /etc/bash.bashrc ]]; then + source /etc/bash.bashrc +fi +if [[ -f ~/.bashrc ]]; then + source ~/.bashrc +fi + +# Reset PS1 so pexpect can find it +PS1="$" + +# Unset PROMPT_COMMAND, so that it can't change PS1 to something unexpected. 
+unset PROMPT_COMMAND diff --git a/myenv/lib/python3.10/site-packages/pexpect/exceptions.py b/myenv/lib/python3.10/site-packages/pexpect/exceptions.py new file mode 100644 index 000000000..cb360f026 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/exceptions.py @@ -0,0 +1,35 @@ +"""Exception classes used by Pexpect""" + +import traceback +import sys + +class ExceptionPexpect(Exception): + '''Base class for all exceptions raised by this module. + ''' + + def __init__(self, value): + super(ExceptionPexpect, self).__init__(value) + self.value = value + + def __str__(self): + return str(self.value) + + def get_trace(self): + '''This returns an abbreviated stack trace with lines that only concern + the caller. In other words, the stack trace inside the Pexpect module + is not included. ''' + + tblist = traceback.extract_tb(sys.exc_info()[2]) + tblist = [item for item in tblist if ('pexpect/__init__' not in item[0]) + and ('pexpect/expect' not in item[0])] + tblist = traceback.format_list(tblist) + return ''.join(tblist) + + +class EOF(ExceptionPexpect): + '''Raised when EOF is read from a child. + This usually means the child has exited.''' + + +class TIMEOUT(ExceptionPexpect): + '''Raised when a read time exceeds the timeout. ''' diff --git a/myenv/lib/python3.10/site-packages/pexpect/expect.py b/myenv/lib/python3.10/site-packages/pexpect/expect.py new file mode 100644 index 000000000..d3409db9d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/expect.py @@ -0,0 +1,371 @@ +import time + +from .exceptions import EOF, TIMEOUT + +class Expecter(object): + def __init__(self, spawn, searcher, searchwindowsize=-1): + self.spawn = spawn + self.searcher = searcher + # A value of -1 means to use the figure from spawn, which should + # be None or a positive number. + if searchwindowsize == -1: + searchwindowsize = spawn.searchwindowsize + self.searchwindowsize = searchwindowsize + self.lookback = None + if hasattr(searcher, 'longest_string'): + self.lookback = searcher.longest_string + + def do_search(self, window, freshlen): + spawn = self.spawn + searcher = self.searcher + if freshlen > len(window): + freshlen = len(window) + index = searcher.search(window, freshlen, self.searchwindowsize) + if index >= 0: + spawn._buffer = spawn.buffer_type() + spawn._buffer.write(window[searcher.end:]) + spawn.before = spawn._before.getvalue()[ + 0:-(len(window) - searcher.start)] + spawn._before = spawn.buffer_type() + spawn._before.write(window[searcher.end:]) + spawn.after = window[searcher.start:searcher.end] + spawn.match = searcher.match + spawn.match_index = index + # Found a match + return index + elif self.searchwindowsize or self.lookback: + maintain = self.searchwindowsize or self.lookback + if spawn._buffer.tell() > maintain: + spawn._buffer = spawn.buffer_type() + spawn._buffer.write(window[-maintain:]) + + def existing_data(self): + # First call from a new call to expect_loop or expect_async. + # self.searchwindowsize may have changed. + # Treat all data as fresh. 
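+        # _before accumulates everything read since the last match (it backs
+        # spawn.before), while _buffer keeps only the not-yet-searched tail,
+        # trimmed to searchwindowsize or lookback when one is set.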
+ spawn = self.spawn + before_len = spawn._before.tell() + buf_len = spawn._buffer.tell() + freshlen = before_len + if before_len > buf_len: + if not self.searchwindowsize: + spawn._buffer = spawn.buffer_type() + window = spawn._before.getvalue() + spawn._buffer.write(window) + elif buf_len < self.searchwindowsize: + spawn._buffer = spawn.buffer_type() + spawn._before.seek( + max(0, before_len - self.searchwindowsize)) + window = spawn._before.read() + spawn._buffer.write(window) + else: + spawn._buffer.seek(max(0, buf_len - self.searchwindowsize)) + window = spawn._buffer.read() + else: + if self.searchwindowsize: + spawn._buffer.seek(max(0, buf_len - self.searchwindowsize)) + window = spawn._buffer.read() + else: + window = spawn._buffer.getvalue() + return self.do_search(window, freshlen) + + def new_data(self, data): + # A subsequent call, after a call to existing_data. + spawn = self.spawn + freshlen = len(data) + spawn._before.write(data) + if not self.searchwindowsize: + if self.lookback: + # search lookback + new data. + old_len = spawn._buffer.tell() + spawn._buffer.write(data) + spawn._buffer.seek(max(0, old_len - self.lookback)) + window = spawn._buffer.read() + else: + # copy the whole buffer (really slow for large datasets). + spawn._buffer.write(data) + window = spawn.buffer + else: + if len(data) >= self.searchwindowsize or not spawn._buffer.tell(): + window = data[-self.searchwindowsize:] + spawn._buffer = spawn.buffer_type() + spawn._buffer.write(window[-self.searchwindowsize:]) + else: + spawn._buffer.write(data) + new_len = spawn._buffer.tell() + spawn._buffer.seek(max(0, new_len - self.searchwindowsize)) + window = spawn._buffer.read() + return self.do_search(window, freshlen) + + def eof(self, err=None): + spawn = self.spawn + + spawn.before = spawn._before.getvalue() + spawn._buffer = spawn.buffer_type() + spawn._before = spawn.buffer_type() + spawn.after = EOF + index = self.searcher.eof_index + if index >= 0: + spawn.match = EOF + spawn.match_index = index + return index + else: + spawn.match = None + spawn.match_index = None + msg = str(spawn) + msg += '\nsearcher: %s' % self.searcher + if err is not None: + msg = str(err) + '\n' + msg + + exc = EOF(msg) + exc.__cause__ = None # in Python 3.x we can use "raise exc from None" + raise exc + + def timeout(self, err=None): + spawn = self.spawn + + spawn.before = spawn._before.getvalue() + spawn.after = TIMEOUT + index = self.searcher.timeout_index + if index >= 0: + spawn.match = TIMEOUT + spawn.match_index = index + return index + else: + spawn.match = None + spawn.match_index = None + msg = str(spawn) + msg += '\nsearcher: %s' % self.searcher + if err is not None: + msg = str(err) + '\n' + msg + + exc = TIMEOUT(msg) + exc.__cause__ = None # in Python 3.x we can use "raise exc from None" + raise exc + + def errored(self): + spawn = self.spawn + spawn.before = spawn._before.getvalue() + spawn.after = None + spawn.match = None + spawn.match_index = None + + def expect_loop(self, timeout=-1): + """Blocking expect""" + spawn = self.spawn + + if timeout is not None: + end_time = time.time() + timeout + + try: + idx = self.existing_data() + if idx is not None: + return idx + while True: + # No match at this point + if (timeout is not None) and (timeout < 0): + return self.timeout() + # Still have time left, so read more data + incoming = spawn.read_nonblocking(spawn.maxread, timeout) + if self.spawn.delayafterread is not None: + time.sleep(self.spawn.delayafterread) + idx = self.new_data(incoming) + # Keep reading until 
exception or return. + if idx is not None: + return idx + if timeout is not None: + timeout = end_time - time.time() + except EOF as e: + return self.eof(e) + except TIMEOUT as e: + return self.timeout(e) + except: + self.errored() + raise + + +class searcher_string(object): + '''This is a plain string search helper for the spawn.expect_any() method. + This helper class is for speed. For more powerful regex patterns + see the helper class, searcher_re. + + Attributes: + + eof_index - index of EOF, or -1 + timeout_index - index of TIMEOUT, or -1 + + After a successful match by the search() method the following attributes + are available: + + start - index into the buffer, first byte of match + end - index into the buffer, first byte after match + match - the matching string itself + + ''' + + def __init__(self, strings): + '''This creates an instance of searcher_string. This argument 'strings' + may be a list; a sequence of strings; or the EOF or TIMEOUT types. ''' + + self.eof_index = -1 + self.timeout_index = -1 + self._strings = [] + self.longest_string = 0 + for n, s in enumerate(strings): + if s is EOF: + self.eof_index = n + continue + if s is TIMEOUT: + self.timeout_index = n + continue + self._strings.append((n, s)) + if len(s) > self.longest_string: + self.longest_string = len(s) + + def __str__(self): + '''This returns a human-readable string that represents the state of + the object.''' + + ss = [(ns[0], ' %d: %r' % ns) for ns in self._strings] + ss.append((-1, 'searcher_string:')) + if self.eof_index >= 0: + ss.append((self.eof_index, ' %d: EOF' % self.eof_index)) + if self.timeout_index >= 0: + ss.append((self.timeout_index, + ' %d: TIMEOUT' % self.timeout_index)) + ss.sort() + ss = list(zip(*ss))[1] + return '\n'.join(ss) + + def search(self, buffer, freshlen, searchwindowsize=None): + '''This searches 'buffer' for the first occurrence of one of the search + strings. 'freshlen' must indicate the number of bytes at the end of + 'buffer' which have not been searched before. It helps to avoid + searching the same, possibly big, buffer over and over again. + + See class spawn for the 'searchwindowsize' argument. + + If there is a match this returns the index of that string, and sets + 'start', 'end' and 'match'. Otherwise, this returns -1. ''' + + first_match = None + + # 'freshlen' helps a lot here. Further optimizations could + # possibly include: + # + # using something like the Boyer-Moore Fast String Searching + # Algorithm; pre-compiling the search through a list of + # strings into something that can scan the input once to + # search for all N strings; realize that if we search for + # ['bar', 'baz'] and the input is '...foo' we need not bother + # rescanning until we've read three more bytes. + # + # Sadly, I don't know enough about this interesting topic. /grahn + + for index, s in self._strings: + if searchwindowsize is None: + # the match, if any, can only be in the fresh data, + # or at the very end of the old data + offset = -(freshlen + len(s)) + else: + # better obey searchwindowsize + offset = -searchwindowsize + n = buffer.find(s, offset) + if n >= 0 and (first_match is None or n < first_match): + first_match = n + best_index, best_match = index, s + if first_match is None: + return -1 + self.match = best_match + self.start = first_match + self.end = self.start + len(self.match) + return best_index + + +class searcher_re(object): + '''This is regular expression string search helper for the + spawn.expect_any() method. 
This helper class is for powerful + pattern matching. For speed, see the helper class, searcher_string. + + Attributes: + + eof_index - index of EOF, or -1 + timeout_index - index of TIMEOUT, or -1 + + After a successful match by the search() method the following attributes + are available: + + start - index into the buffer, first byte of match + end - index into the buffer, first byte after match + match - the re.match object returned by a successful re.search + + ''' + + def __init__(self, patterns): + '''This creates an instance that searches for 'patterns' Where + 'patterns' may be a list or other sequence of compiled regular + expressions, or the EOF or TIMEOUT types.''' + + self.eof_index = -1 + self.timeout_index = -1 + self._searches = [] + for n, s in enumerate(patterns): + if s is EOF: + self.eof_index = n + continue + if s is TIMEOUT: + self.timeout_index = n + continue + self._searches.append((n, s)) + + def __str__(self): + '''This returns a human-readable string that represents the state of + the object.''' + + #ss = [(n, ' %d: re.compile("%s")' % + # (n, repr(s.pattern))) for n, s in self._searches] + ss = list() + for n, s in self._searches: + ss.append((n, ' %d: re.compile(%r)' % (n, s.pattern))) + ss.append((-1, 'searcher_re:')) + if self.eof_index >= 0: + ss.append((self.eof_index, ' %d: EOF' % self.eof_index)) + if self.timeout_index >= 0: + ss.append((self.timeout_index, ' %d: TIMEOUT' % + self.timeout_index)) + ss.sort() + ss = list(zip(*ss))[1] + return '\n'.join(ss) + + def search(self, buffer, freshlen, searchwindowsize=None): + '''This searches 'buffer' for the first occurrence of one of the regular + expressions. 'freshlen' must indicate the number of bytes at the end of + 'buffer' which have not been searched before. + + See class spawn for the 'searchwindowsize' argument. + + If there is a match this returns the index of that string, and sets + 'start', 'end' and 'match'. Otherwise, returns -1.''' + + first_match = None + # 'freshlen' doesn't help here -- we cannot predict the + # length of a match, and the re module provides no help. + if searchwindowsize is None: + searchstart = 0 + else: + searchstart = max(0, len(buffer) - searchwindowsize) + for index, s in self._searches: + match = s.search(buffer, searchstart) + if match is None: + continue + n = match.start() + if first_match is None or n < first_match: + first_match = n + the_match = match + best_index = index + if first_match is None: + return -1 + self.start = first_match + self.match = the_match + self.end = self.match.end() + return best_index diff --git a/myenv/lib/python3.10/site-packages/pexpect/fdpexpect.py b/myenv/lib/python3.10/site-packages/pexpect/fdpexpect.py new file mode 100644 index 000000000..cddd50e10 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/fdpexpect.py @@ -0,0 +1,148 @@ +'''This is like pexpect, but it will work with any file descriptor that you +pass it. You are responsible for opening and close the file descriptor. +This allows you to use Pexpect with sockets and named pipes (FIFOs). + +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. 
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+'''
+
+from .spawnbase import SpawnBase
+from .exceptions import ExceptionPexpect, TIMEOUT
+from .utils import select_ignore_interrupts, poll_ignore_interrupts
+import os
+
+__all__ = ['fdspawn']
+
+class fdspawn(SpawnBase):
+    '''This is like pexpect.spawn but allows you to supply your own open file
+    descriptor. For example, you could use it to read through a file looking
+    for patterns, or to control a modem or serial device. '''
+
+    def __init__ (self, fd, args=None, timeout=30, maxread=2000, searchwindowsize=None,
+                  logfile=None, encoding=None, codec_errors='strict', use_poll=False):
+        '''This takes a file descriptor (an int) or an object that supports the
+        fileno() method (returning an int). All Python file-like objects
+        support fileno(). '''
+
+        if type(fd) != type(0) and hasattr(fd, 'fileno'):
+            fd = fd.fileno()
+
+        if type(fd) != type(0):
+            raise ExceptionPexpect('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.')
+
+        try: # make sure fd is a valid file descriptor
+            os.fstat(fd)
+        except OSError:
+            raise ExceptionPexpect('The fd argument is not a valid file descriptor.')
+
+        self.args = None
+        self.command = None
+        SpawnBase.__init__(self, timeout, maxread, searchwindowsize, logfile,
+                           encoding=encoding, codec_errors=codec_errors)
+        self.child_fd = fd
+        self.own_fd = False
+        self.closed = False
+        self.name = '<file descriptor %d>' % fd
+        self.use_poll = use_poll
+
+    def close (self):
+        """Close the file descriptor.
+
+        Calling this method a second time does nothing, but if the file
+        descriptor was closed elsewhere, :class:`OSError` will be raised.
+        """
+        if self.child_fd == -1:
+            return
+
+        self.flush()
+        os.close(self.child_fd)
+        self.child_fd = -1
+        self.closed = True
+
+    def isalive (self):
+        '''This checks if the file descriptor is still valid. If :func:`os.fstat`
+        does not raise an exception then we assume it is alive. '''
+
+        if self.child_fd == -1:
+            return False
+        try:
+            os.fstat(self.child_fd)
+            return True
+        except:
+            return False
+
+    def terminate (self, force=False):  # pragma: no cover
+        '''Deprecated and invalid. Just raises an exception.'''
+        raise ExceptionPexpect('This method is not valid for file descriptors.')
+
+    # These four methods are left around for backwards compatibility, but not
+    # documented as part of fdpexpect. You're encouraged to use os.write
+    # directly.
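+    # Editor's note -- an illustrative sketch, not part of upstream pexpect:
+    # assuming `child` is an fdspawn instance, the convenience call
+    #
+    #     child.send(b'hello')
+    #
+    # is roughly equivalent to the raw call it wraps,
+    #
+    #     os.write(child.child_fd, b'hello')
+    #
+    # except that send() first coerces/encodes str input and copies the data
+    # to any configured logfile.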
+ def send(self, s): + "Write to fd, return number of bytes written" + s = self._coerce_send_string(s) + self._log(s, 'send') + + b = self._encoder.encode(s, final=False) + return os.write(self.child_fd, b) + + def sendline(self, s): + "Write to fd with trailing newline, return number of bytes written" + s = self._coerce_send_string(s) + return self.send(s + self.linesep) + + def write(self, s): + "Write to fd, return None" + self.send(s) + + def writelines(self, sequence): + "Call self.write() for each item in sequence" + for s in sequence: + self.write(s) + + def read_nonblocking(self, size=1, timeout=-1): + """ + Read from the file descriptor and return the result as a string. + + The read_nonblocking method of :class:`SpawnBase` assumes that a call + to os.read will not block (timeout parameter is ignored). This is not + the case for POSIX file-like objects such as sockets and serial ports. + + Use :func:`select.select`, timeout is implemented conditionally for + POSIX systems. + + :param int size: Read at most *size* bytes. + :param int timeout: Wait timeout seconds for file descriptor to be + ready to read. When -1 (default), use self.timeout. When 0, poll. + :return: String containing the bytes read + """ + if os.name == 'posix': + if timeout == -1: + timeout = self.timeout + rlist = [self.child_fd] + wlist = [] + xlist = [] + if self.use_poll: + rlist = poll_ignore_interrupts(rlist, timeout) + else: + rlist, wlist, xlist = select_ignore_interrupts( + rlist, wlist, xlist, timeout + ) + if self.child_fd not in rlist: + raise TIMEOUT('Timeout exceeded.') + return super(fdspawn, self).read_nonblocking(size) diff --git a/myenv/lib/python3.10/site-packages/pexpect/popen_spawn.py b/myenv/lib/python3.10/site-packages/pexpect/popen_spawn.py new file mode 100644 index 000000000..4bb58cfe7 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/popen_spawn.py @@ -0,0 +1,188 @@ +"""Provides an interface like pexpect.spawn interface using subprocess.Popen +""" +import os +import threading +import subprocess +import sys +import time +import signal +import shlex + +try: + from queue import Queue, Empty # Python 3 +except ImportError: + from Queue import Queue, Empty # Python 2 + +from .spawnbase import SpawnBase, PY3 +from .exceptions import EOF +from .utils import string_types + +class PopenSpawn(SpawnBase): + def __init__(self, cmd, timeout=30, maxread=2000, searchwindowsize=None, + logfile=None, cwd=None, env=None, encoding=None, + codec_errors='strict', preexec_fn=None): + super(PopenSpawn, self).__init__(timeout=timeout, maxread=maxread, + searchwindowsize=searchwindowsize, logfile=logfile, + encoding=encoding, codec_errors=codec_errors) + + # Note that `SpawnBase` initializes `self.crlf` to `\r\n` + # because the default behaviour for a PTY is to convert + # incoming LF to `\r\n` (see the `onlcr` flag and + # https://stackoverflow.com/a/35887657/5397009). Here we set + # it to `os.linesep` because that is what the spawned + # application outputs by default and `popen` doesn't translate + # anything. 
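+        # Editor's note -- illustrative, platform-dependent values (assuming
+        # CPython defaults): os.linesep is '\n' on POSIX and '\r\n' on
+        # Windows, so in bytes mode a PopenSpawn built on Linux ends up with
+        #
+        #     child = PopenSpawn('cat')
+        #     child.crlf == b'\n'    # readline() then splits on plain LF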
+ if encoding is None: + self.crlf = os.linesep.encode ("ascii") + else: + self.crlf = self.string_type (os.linesep) + + kwargs = dict(bufsize=0, stdin=subprocess.PIPE, + stderr=subprocess.STDOUT, stdout=subprocess.PIPE, + cwd=cwd, preexec_fn=preexec_fn, env=env) + + if sys.platform == 'win32': + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + kwargs['startupinfo'] = startupinfo + kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP + + if isinstance(cmd, string_types) and sys.platform != 'win32': + cmd = shlex.split(cmd, posix=os.name == 'posix') + + self.proc = subprocess.Popen(cmd, **kwargs) + self.pid = self.proc.pid + self.closed = False + self._buf = self.string_type() + + self._read_queue = Queue() + self._read_thread = threading.Thread(target=self._read_incoming) + self._read_thread.setDaemon(True) + self._read_thread.start() + + _read_reached_eof = False + + def read_nonblocking(self, size, timeout): + buf = self._buf + if self._read_reached_eof: + # We have already finished reading. Use up any buffered data, + # then raise EOF + if buf: + self._buf = buf[size:] + return buf[:size] + else: + self.flag_eof = True + raise EOF('End Of File (EOF).') + + if timeout == -1: + timeout = self.timeout + elif timeout is None: + timeout = 1e6 + + t0 = time.time() + while (time.time() - t0) < timeout and size and len(buf) < size: + try: + incoming = self._read_queue.get_nowait() + except Empty: + break + else: + if incoming is None: + self._read_reached_eof = True + break + + buf += self._decoder.decode(incoming, final=False) + + r, self._buf = buf[:size], buf[size:] + + self._log(r, 'read') + return r + + def _read_incoming(self): + """Run in a thread to move output from a pipe to a queue.""" + fileno = self.proc.stdout.fileno() + while 1: + buf = b'' + try: + buf = os.read(fileno, 1024) + except OSError as e: + self._log(e, 'read') + + if not buf: + # This indicates we have reached EOF + self._read_queue.put(None) + return + + self._read_queue.put(buf) + + def write(self, s): + '''This is similar to send() except that there is no return value. + ''' + self.send(s) + + def writelines(self, sequence): + '''This calls write() for each element in the sequence. + + The sequence can be any iterable object producing strings, typically a + list of strings. This does not add line separators. There is no return + value. + ''' + for s in sequence: + self.send(s) + + def send(self, s): + '''Send data to the subprocess' stdin. + + Returns the number of bytes written. + ''' + s = self._coerce_send_string(s) + self._log(s, 'send') + + b = self._encoder.encode(s, final=False) + if PY3: + return self.proc.stdin.write(b) + else: + # On Python 2, .write() returns None, so we return the length of + # bytes written ourselves. This assumes they all got written. + self.proc.stdin.write(b) + return len(b) + + def sendline(self, s=''): + '''Wraps send(), sending string ``s`` to child process, with os.linesep + automatically appended. Returns number of bytes written. ''' + + n = self.send(s) + return n + self.send(self.linesep) + + def wait(self): + '''Wait for the subprocess to finish. + + Returns the exit code. + ''' + status = self.proc.wait() + if status >= 0: + self.exitstatus = status + self.signalstatus = None + else: + self.exitstatus = None + self.signalstatus = -status + self.terminated = True + return status + + def kill(self, sig): + '''Sends a Unix signal to the subprocess. + + Use constants from the :mod:`signal` module to specify which signal. 
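+
+        A hedged usage sketch (editor's addition; ``child`` being a
+        PopenSpawn instance)::
+
+            import signal
+            child.kill(signal.SIGINT)  # mapped to CTRL_C_EVENT on win32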
+ ''' + if sys.platform == 'win32': + if sig in [signal.SIGINT, signal.CTRL_C_EVENT]: + sig = signal.CTRL_C_EVENT + elif sig in [signal.SIGBREAK, signal.CTRL_BREAK_EVENT]: + sig = signal.CTRL_BREAK_EVENT + else: + sig = signal.SIGTERM + + os.kill(self.proc.pid, sig) + + def sendeof(self): + '''Closes the stdin pipe from the writing end.''' + self.proc.stdin.close() diff --git a/myenv/lib/python3.10/site-packages/pexpect/pty_spawn.py b/myenv/lib/python3.10/site-packages/pexpect/pty_spawn.py new file mode 100644 index 000000000..8e28ca7cd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/pty_spawn.py @@ -0,0 +1,860 @@ +import os +import sys +import time +import pty +import tty +import errno +import signal +from contextlib import contextmanager + +import ptyprocess +from ptyprocess.ptyprocess import use_native_pty_fork + +from .exceptions import ExceptionPexpect, EOF, TIMEOUT +from .spawnbase import SpawnBase +from .utils import ( + which, split_command_line, select_ignore_interrupts, poll_ignore_interrupts +) + +@contextmanager +def _wrap_ptyprocess_err(): + """Turn ptyprocess errors into our own ExceptionPexpect errors""" + try: + yield + except ptyprocess.PtyProcessError as e: + raise ExceptionPexpect(*e.args) + +PY3 = (sys.version_info[0] >= 3) + +class spawn(SpawnBase): + '''This is the main class interface for Pexpect. Use this class to start + and control child applications. ''' + + # This is purely informational now - changing it has no effect + use_native_pty_fork = use_native_pty_fork + + def __init__(self, command, args=[], timeout=30, maxread=2000, + searchwindowsize=None, logfile=None, cwd=None, env=None, + ignore_sighup=False, echo=True, preexec_fn=None, + encoding=None, codec_errors='strict', dimensions=None, + use_poll=False): + '''This is the constructor. The command parameter may be a string that + includes a command and any arguments to the command. For example:: + + child = pexpect.spawn('/usr/bin/ftp') + child = pexpect.spawn('/usr/bin/ssh user@example.com') + child = pexpect.spawn('ls -latr /tmp') + + You may also construct it with a list of arguments like so:: + + child = pexpect.spawn('/usr/bin/ftp', []) + child = pexpect.spawn('/usr/bin/ssh', ['user@example.com']) + child = pexpect.spawn('ls', ['-latr', '/tmp']) + + After this the child application will be created and will be ready to + talk to. For normal use, see expect() and send() and sendline(). + + Remember that Pexpect does NOT interpret shell meta characters such as + redirect, pipe, or wild cards (``>``, ``|``, or ``*``). This is a + common mistake. If you want to run a command and pipe it through + another command then you must also start a shell. For example:: + + child = pexpect.spawn('/bin/bash -c "ls -l | grep LOG > logs.txt"') + child.expect(pexpect.EOF) + + The second form of spawn (where you pass a list of arguments) is useful + in situations where you wish to spawn a command and pass it its own + argument list. This can make syntax more clear. For example, the + following is equivalent to the previous example:: + + shell_cmd = 'ls -l | grep LOG > logs.txt' + child = pexpect.spawn('/bin/bash', ['-c', shell_cmd]) + child.expect(pexpect.EOF) + + The maxread attribute sets the read buffer size. This is maximum number + of bytes that Pexpect will try to read from a TTY at one time. Setting + the maxread size to 1 will turn off buffering. Setting the maxread + value higher may help performance in cases where large amounts of + output are read back from the child. 
This feature is useful in + conjunction with searchwindowsize. + + When the keyword argument *searchwindowsize* is None (default), the + full buffer is searched at each iteration of receiving incoming data. + The default number of bytes scanned at each iteration is very large + and may be reduced to collaterally reduce search cost. After + :meth:`~.expect` returns, the full buffer attribute remains up to + size *maxread* irrespective of *searchwindowsize* value. + + When the keyword argument ``timeout`` is specified as a number, + (default: *30*), then :class:`TIMEOUT` will be raised after the value + specified has elapsed, in seconds, for any of the :meth:`~.expect` + family of method calls. When None, TIMEOUT will not be raised, and + :meth:`~.expect` may block indefinitely until match. + + + The logfile member turns on or off logging. All input and output will + be copied to the given file object. Set logfile to None to stop + logging. This is the default. Set logfile to sys.stdout to echo + everything to standard output. The logfile is flushed after each write. + + Example log input and output to a file:: + + child = pexpect.spawn('some_command') + fout = open('mylog.txt','wb') + child.logfile = fout + + Example log to stdout:: + + # In Python 2: + child = pexpect.spawn('some_command') + child.logfile = sys.stdout + + # In Python 3, we'll use the ``encoding`` argument to decode data + # from the subprocess and handle it as unicode: + child = pexpect.spawn('some_command', encoding='utf-8') + child.logfile = sys.stdout + + The logfile_read and logfile_send members can be used to separately log + the input from the child and output sent to the child. Sometimes you + don't want to see everything you write to the child. You only want to + log what the child sends back. For example:: + + child = pexpect.spawn('some_command') + child.logfile_read = sys.stdout + + You will need to pass an encoding to spawn in the above code if you are + using Python 3. + + To separately log output sent to the child use logfile_send:: + + child.logfile_send = fout + + If ``ignore_sighup`` is True, the child process will ignore SIGHUP + signals. The default is False from Pexpect 4.0, meaning that SIGHUP + will be handled normally by the child. + + The delaybeforesend helps overcome a weird behavior that many users + were experiencing. The typical problem was that a user would expect() a + "Password:" prompt and then immediately call sendline() to send the + password. The user would then see that their password was echoed back + to them. Passwords don't normally echo. The problem is caused by the + fact that most applications print out the "Password" prompt and then + turn off stdin echo, but if you send your password before the + application turned off echo, then you get your password echoed. + Normally this wouldn't be a problem when interacting with a human at a + real keyboard. If you introduce a slight delay just before writing then + this seems to clear up the problem. This was such a common problem for + many users that I decided that the default pexpect behavior should be + to sleep just before writing to the child application. 1/20th of a + second (50 ms) seems to be enough to clear up the problem. You can set + delaybeforesend to None to return to the old behavior. + + Note that spawn is clever about finding commands on your path. + It uses the same logic that "which" uses to find executables. + + If you wish to get the exit status of the child you must call the + close() method. 
The exit or signal status of the child will be stored
+        in self.exitstatus or self.signalstatus. If the child exited normally
+        then exitstatus will store the exit return code and signalstatus will
+        be None. If the child was terminated abnormally with a signal then
+        signalstatus will store the signal value and exitstatus will be None::
+
+            child = pexpect.spawn('some_command')
+            child.close()
+            print(child.exitstatus, child.signalstatus)
+
+        If you need more detail you can also read the self.status member which
+        stores the status returned by os.waitpid. You can interpret this using
+        os.WIFEXITED/os.WEXITSTATUS or os.WIFSIGNALED/os.WTERMSIG.
+
+        The echo attribute may be set to False to disable echoing of input.
+        As a pseudo-terminal, all input echoed by the "keyboard" (send()
+        or sendline()) will be repeated to output. For many cases, it is
+        not desirable to have echo enabled, and it may be later disabled
+        using setecho(False) followed by waitnoecho(). However, for some
+        platforms such as Solaris, this is not possible, and should be
+        disabled immediately on spawn.
+
+        If preexec_fn is given, it will be called in the child process before
+        launching the given command. This is useful to e.g. reset inherited
+        signal handlers.
+
+        The dimensions attribute specifies the size of the pseudo-terminal as
+        seen by the subprocess, and is specified as a two-entry tuple (rows,
+        columns). If this is unspecified, the defaults in ptyprocess will apply.
+
+        The use_poll attribute enables using select.poll() over select.select()
+        for socket handling. This is handy if your system could have > 1024 fds.
+        '''
+        super(spawn, self).__init__(timeout=timeout, maxread=maxread, searchwindowsize=searchwindowsize,
+                                    logfile=logfile, encoding=encoding, codec_errors=codec_errors)
+        self.STDIN_FILENO = pty.STDIN_FILENO
+        self.STDOUT_FILENO = pty.STDOUT_FILENO
+        self.STDERR_FILENO = pty.STDERR_FILENO
+        self.str_last_chars = 100
+        self.cwd = cwd
+        self.env = env
+        self.echo = echo
+        self.ignore_sighup = ignore_sighup
+        self.__irix_hack = sys.platform.lower().startswith('irix')
+        if command is None:
+            self.command = None
+            self.args = None
+            self.name = '<pexpect factory incomplete>'
+        else:
+            self._spawn(command, args, preexec_fn, dimensions)
+        self.use_poll = use_poll
+
+    def __str__(self):
+        '''This returns a human-readable string that represents the state of
+        the object.
''' + + s = [] + s.append(repr(self)) + s.append('command: ' + str(self.command)) + s.append('args: %r' % (self.args,)) + s.append('buffer (last %s chars): %r' % (self.str_last_chars,self.buffer[-self.str_last_chars:])) + s.append('before (last %s chars): %r' % (self.str_last_chars,self.before[-self.str_last_chars:] if self.before else '')) + s.append('after: %r' % (self.after,)) + s.append('match: %r' % (self.match,)) + s.append('match_index: ' + str(self.match_index)) + s.append('exitstatus: ' + str(self.exitstatus)) + if hasattr(self, 'ptyproc'): + s.append('flag_eof: ' + str(self.flag_eof)) + s.append('pid: ' + str(self.pid)) + s.append('child_fd: ' + str(self.child_fd)) + s.append('closed: ' + str(self.closed)) + s.append('timeout: ' + str(self.timeout)) + s.append('delimiter: ' + str(self.delimiter)) + s.append('logfile: ' + str(self.logfile)) + s.append('logfile_read: ' + str(self.logfile_read)) + s.append('logfile_send: ' + str(self.logfile_send)) + s.append('maxread: ' + str(self.maxread)) + s.append('ignorecase: ' + str(self.ignorecase)) + s.append('searchwindowsize: ' + str(self.searchwindowsize)) + s.append('delaybeforesend: ' + str(self.delaybeforesend)) + s.append('delayafterclose: ' + str(self.delayafterclose)) + s.append('delayafterterminate: ' + str(self.delayafterterminate)) + return '\n'.join(s) + + def _spawn(self, command, args=[], preexec_fn=None, dimensions=None): + '''This starts the given command in a child process. This does all the + fork/exec type of stuff for a pty. This is called by __init__. If args + is empty then command will be parsed (split on spaces) and args will be + set to parsed arguments. ''' + + # The pid and child_fd of this object get set by this method. + # Note that it is difficult for this method to fail. + # You cannot detect if the child process cannot start. + # So the only way you can tell if the child process started + # or not is to try to read from the file descriptor. If you get + # EOF immediately then it means that the child is already dead. + # That may not necessarily be bad because you may have spawned a child + # that performs some task; creates no stdout output; and then dies. + + # If command is an int type then it may represent a file descriptor. + if isinstance(command, type(0)): + raise ExceptionPexpect('Command is an int type. ' + + 'If this is a file descriptor then maybe you want to ' + + 'use fdpexpect.fdspawn which takes an existing ' + + 'file descriptor instead of a command string.') + + if not isinstance(args, type([])): + raise TypeError('The argument, args, must be a list.') + + if args == []: + self.args = split_command_line(command) + self.command = self.args[0] + else: + # Make a shallow copy of the args list. + self.args = args[:] + self.args.insert(0, command) + self.command = command + + command_with_path = which(self.command, env=self.env) + if command_with_path is None: + raise ExceptionPexpect('The command was not found or was not ' + + 'executable: %s.' % self.command) + self.command = command_with_path + self.args[0] = self.command + + self.name = '<' + ' '.join(self.args) + '>' + + assert self.pid is None, 'The pid member must be None.' + assert self.command is not None, 'The command member must not be None.' 
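+        # Editor's note -- illustrative only: for spawn('ls -l /tmp') the
+        # branch above yields self.args == ['ls', '-l', '/tmp'], and which()
+        # then resolves self.command to an absolute path such as '/bin/ls'.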
+ + kwargs = {'echo': self.echo, 'preexec_fn': preexec_fn} + if self.ignore_sighup: + def preexec_wrapper(): + "Set SIGHUP to be ignored, then call the real preexec_fn" + signal.signal(signal.SIGHUP, signal.SIG_IGN) + if preexec_fn is not None: + preexec_fn() + kwargs['preexec_fn'] = preexec_wrapper + + if dimensions is not None: + kwargs['dimensions'] = dimensions + + if self.encoding is not None: + # Encode command line using the specified encoding + self.args = [a if isinstance(a, bytes) else a.encode(self.encoding) + for a in self.args] + + self.ptyproc = self._spawnpty(self.args, env=self.env, + cwd=self.cwd, **kwargs) + + self.pid = self.ptyproc.pid + self.child_fd = self.ptyproc.fd + + + self.terminated = False + self.closed = False + + def _spawnpty(self, args, **kwargs): + '''Spawn a pty and return an instance of PtyProcess.''' + return ptyprocess.PtyProcess.spawn(args, **kwargs) + + def close(self, force=True): + '''This closes the connection with the child application. Note that + calling close() more than once is valid. This emulates standard Python + behavior with files. Set force to True if you want to make sure that + the child is terminated (SIGKILL is sent if the child ignores SIGHUP + and SIGINT). ''' + + self.flush() + with _wrap_ptyprocess_err(): + # PtyProcessError may be raised if it is not possible to terminate + # the child. + self.ptyproc.close(force=force) + self.isalive() # Update exit status from ptyproc + self.child_fd = -1 + self.closed = True + + def isatty(self): + '''This returns True if the file descriptor is open and connected to a + tty(-like) device, else False. + + On SVR4-style platforms implementing streams, such as SunOS and HP-UX, + the child pty may not appear as a terminal device. This means + methods such as setecho(), setwinsize(), getwinsize() may raise an + IOError. ''' + + return os.isatty(self.child_fd) + + def waitnoecho(self, timeout=-1): + '''This waits until the terminal ECHO flag is set False. This returns + True if the echo mode is off. This returns False if the ECHO flag was + not set False before the timeout. This can be used to detect when the + child is waiting for a password. Usually a child application will turn + off echo mode when it is waiting for the user to enter a password. For + example, instead of expecting the "password:" prompt you can wait for + the child to set ECHO off:: + + p = pexpect.spawn('ssh user@example.com') + p.waitnoecho() + p.sendline(mypassword) + + If timeout==-1 then this method will use the value in self.timeout. + If timeout==None then this method to block until ECHO flag is False. + ''' + + if timeout == -1: + timeout = self.timeout + if timeout is not None: + end_time = time.time() + timeout + while True: + if not self.getecho(): + return True + if timeout < 0 and timeout is not None: + return False + if timeout is not None: + timeout = end_time - time.time() + time.sleep(0.1) + + def getecho(self): + '''This returns the terminal echo mode. This returns True if echo is + on or False if echo is off. Child applications that are expecting you + to enter a password often set ECHO False. See waitnoecho(). + + Not supported on platforms where ``isatty()`` returns False. ''' + return self.ptyproc.getecho() + + def setecho(self, state): + '''This sets the terminal echo mode on or off. Note that anything the + child sent before the echo will be lost, so you should be sure that + your input buffer is empty before you call setecho(). 
For example, the + following will work as expected:: + + p = pexpect.spawn('cat') # Echo is on by default. + p.sendline('1234') # We expect see this twice from the child... + p.expect(['1234']) # ... once from the tty echo... + p.expect(['1234']) # ... and again from cat itself. + p.setecho(False) # Turn off tty echo + p.sendline('abcd') # We will set this only once (echoed by cat). + p.sendline('wxyz') # We will set this only once (echoed by cat) + p.expect(['abcd']) + p.expect(['wxyz']) + + The following WILL NOT WORK because the lines sent before the setecho + will be lost:: + + p = pexpect.spawn('cat') + p.sendline('1234') + p.setecho(False) # Turn off tty echo + p.sendline('abcd') # We will set this only once (echoed by cat). + p.sendline('wxyz') # We will set this only once (echoed by cat) + p.expect(['1234']) + p.expect(['1234']) + p.expect(['abcd']) + p.expect(['wxyz']) + + + Not supported on platforms where ``isatty()`` returns False. + ''' + return self.ptyproc.setecho(state) + + def read_nonblocking(self, size=1, timeout=-1): + '''This reads at most size characters from the child application. It + includes a timeout. If the read does not complete within the timeout + period then a TIMEOUT exception is raised. If the end of file is read + then an EOF exception will be raised. If a logfile is specified, a + copy is written to that log. + + If timeout is None then the read may block indefinitely. + If timeout is -1 then the self.timeout value is used. If timeout is 0 + then the child is polled and if there is no data immediately ready + then this will raise a TIMEOUT exception. + + The timeout refers only to the amount of time to read at least one + character. This is not affected by the 'size' parameter, so if you call + read_nonblocking(size=100, timeout=30) and only one character is + available right away then one character will be returned immediately. + It will not wait for 30 seconds for another 99 characters to come in. + + On the other hand, if there are bytes available to read immediately, + all those bytes will be read (up to the buffer size). So, if the + buffer size is 1 megabyte and there is 1 megabyte of data available + to read, the buffer will be filled, regardless of timeout. + + This is a wrapper around os.read(). It uses select.select() or + select.poll() to implement the timeout. ''' + + if self.closed: + raise ValueError('I/O operation on closed file.') + + if self.use_poll: + def select(timeout): + return poll_ignore_interrupts([self.child_fd], timeout) + else: + def select(timeout): + return select_ignore_interrupts([self.child_fd], [], [], timeout)[0] + + # If there is data available to read right now, read as much as + # we can. We do this to increase performance if there are a lot + # of bytes to be read. This also avoids calling isalive() too + # often. See also: + # * https://github.com/pexpect/pexpect/pull/304 + # * http://trac.sagemath.org/ticket/10295 + if select(0): + try: + incoming = super(spawn, self).read_nonblocking(size) + except EOF: + # Maybe the child is dead: update some attributes in that case + self.isalive() + raise + while len(incoming) < size and select(0): + try: + incoming += super(spawn, self).read_nonblocking(size - len(incoming)) + except EOF: + # Maybe the child is dead: update some attributes in that case + self.isalive() + # Don't raise EOF, just return what we read so far. 
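+                    # Editor's note: returning the partial data lets the
+                    # caller consume the final bytes now; the next call to
+                    # read_nonblocking() will observe EOF and raise it then.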
+ return incoming + return incoming + + if timeout == -1: + timeout = self.timeout + + if not self.isalive(): + # The process is dead, but there may or may not be data + # available to read. Note that some systems such as Solaris + # do not give an EOF when the child dies. In fact, you can + # still try to read from the child_fd -- it will block + # forever or until TIMEOUT. For that reason, it's important + # to do this check before calling select() with timeout. + if select(0): + return super(spawn, self).read_nonblocking(size) + self.flag_eof = True + raise EOF('End Of File (EOF). Braindead platform.') + elif self.__irix_hack: + # Irix takes a long time before it realizes a child was terminated. + # Make sure that the timeout is at least 2 seconds. + # FIXME So does this mean Irix systems are forced to always have + # FIXME a 2 second delay when calling read_nonblocking? That sucks. + if timeout is not None and timeout < 2: + timeout = 2 + + # Because of the select(0) check above, we know that no data + # is available right now. But if a non-zero timeout is given + # (possibly timeout=None), we call select() with a timeout. + if (timeout != 0) and select(timeout): + return super(spawn, self).read_nonblocking(size) + + if not self.isalive(): + # Some platforms, such as Irix, will claim that their + # processes are alive; timeout on the select; and + # then finally admit that they are not alive. + self.flag_eof = True + raise EOF('End of File (EOF). Very slow platform.') + else: + raise TIMEOUT('Timeout exceeded.') + + def write(self, s): + '''This is similar to send() except that there is no return value. + ''' + + self.send(s) + + def writelines(self, sequence): + '''This calls write() for each element in the sequence. The sequence + can be any iterable object producing strings, typically a list of + strings. This does not add line separators. There is no return value. + ''' + + for s in sequence: + self.write(s) + + def send(self, s): + '''Sends string ``s`` to the child process, returning the number of + bytes written. If a logfile is specified, a copy is written to that + log. + + The default terminal input mode is canonical processing unless set + otherwise by the child process. This allows backspace and other line + processing to be performed prior to transmitting to the receiving + program. As this is buffered, there is a limited size of such buffer. + + On Linux systems, this is 4096 (defined by N_TTY_BUF_SIZE). All + other systems honor the POSIX.1 definition PC_MAX_CANON -- 1024 + on OSX, 256 on OpenSolaris, and 1920 on FreeBSD. + + This value may be discovered using fpathconf(3):: + + >>> from os import fpathconf + >>> print(fpathconf(0, 'PC_MAX_CANON')) + 256 + + On such a system, only 256 bytes may be received per line. Any + subsequent bytes received will be discarded. BEL (``'\a'``) is then + sent to output if IMAXBEL (termios.h) is set by the tty driver. + This is usually enabled by default. Linux does not honor this as + an option -- it behaves as though it is always set on. 
+ + Canonical input processing may be disabled altogether by executing + a shell, then stty(1), before executing the final program:: + + >>> bash = pexpect.spawn('/bin/bash', echo=False) + >>> bash.sendline('stty -icanon') + >>> bash.sendline('base64') + >>> bash.sendline('x' * 5000) + ''' + + if self.delaybeforesend is not None: + time.sleep(self.delaybeforesend) + + s = self._coerce_send_string(s) + self._log(s, 'send') + + b = self._encoder.encode(s, final=False) + return os.write(self.child_fd, b) + + def sendline(self, s=''): + '''Wraps send(), sending string ``s`` to child process, with + ``os.linesep`` automatically appended. Returns number of bytes + written. Only a limited number of bytes may be sent for each + line in the default terminal mode, see docstring of :meth:`send`. + ''' + s = self._coerce_send_string(s) + return self.send(s + self.linesep) + + def _log_control(self, s): + """Write control characters to the appropriate log files""" + if self.encoding is not None: + s = s.decode(self.encoding, 'replace') + self._log(s, 'send') + + def sendcontrol(self, char): + '''Helper method that wraps send() with mnemonic access for sending control + character to the child (such as Ctrl-C or Ctrl-D). For example, to send + Ctrl-G (ASCII 7, bell, '\a'):: + + child.sendcontrol('g') + + See also, sendintr() and sendeof(). + ''' + n, byte = self.ptyproc.sendcontrol(char) + self._log_control(byte) + return n + + def sendeof(self): + '''This sends an EOF to the child. This sends a character which causes + the pending parent output buffer to be sent to the waiting child + program without waiting for end-of-line. If it is the first character + of the line, the read() in the user program returns 0, which signifies + end-of-file. This means to work as expected a sendeof() has to be + called at the beginning of a line. This method does not send a newline. + It is the responsibility of the caller to ensure the eof is sent at the + beginning of a line. ''' + + n, byte = self.ptyproc.sendeof() + self._log_control(byte) + + def sendintr(self): + '''This sends a SIGINT to the child. It does not require + the SIGINT to be the first character on a line. ''' + + n, byte = self.ptyproc.sendintr() + self._log_control(byte) + + @property + def flag_eof(self): + return self.ptyproc.flag_eof + + @flag_eof.setter + def flag_eof(self, value): + self.ptyproc.flag_eof = value + + def eof(self): + '''This returns True if the EOF exception was ever raised. + ''' + return self.flag_eof + + def terminate(self, force=False): + '''This forces a child process to terminate. It starts nicely with + SIGHUP and SIGINT. If "force" is True then moves onto SIGKILL. This + returns True if the child was terminated. This returns False if the + child could not be terminated. ''' + + if not self.isalive(): + return True + try: + self.kill(signal.SIGHUP) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + self.kill(signal.SIGCONT) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + self.kill(signal.SIGINT) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + if force: + self.kill(signal.SIGKILL) + time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + else: + return False + return False + except OSError: + # I think there are kernel timing issues that sometimes cause + # this to happen. I think isalive() reports True, but the + # process is dead to the kernel. + # Make one last attempt to see if the kernel is up to date. 
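+            # Editor's note: i.e. sleep delayafterterminate once more and
+            # re-check isalive(), in case the kernel simply had not reaped
+            # the child yet.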
+ time.sleep(self.delayafterterminate) + if not self.isalive(): + return True + else: + return False + + def wait(self): + '''This waits until the child exits. This is a blocking call. This will + not read any data from the child, so this will block forever if the + child has unread output and has terminated. In other words, the child + may have printed output then called exit(), but, the child is + technically still alive until its output is read by the parent. + + This method is non-blocking if :meth:`wait` has already been called + previously or :meth:`isalive` method returns False. It simply returns + the previously determined exit status. + ''' + + ptyproc = self.ptyproc + with _wrap_ptyprocess_err(): + # exception may occur if "Is some other process attempting + # "job control with our child pid?" + exitstatus = ptyproc.wait() + self.status = ptyproc.status + self.exitstatus = ptyproc.exitstatus + self.signalstatus = ptyproc.signalstatus + self.terminated = True + + return exitstatus + + def isalive(self): + '''This tests if the child process is running or not. This is + non-blocking. If the child was terminated then this will read the + exitstatus or signalstatus of the child. This returns True if the child + process appears to be running or False if not. It can take literally + SECONDS for Solaris to return the right status. ''' + + ptyproc = self.ptyproc + with _wrap_ptyprocess_err(): + alive = ptyproc.isalive() + + if not alive: + self.status = ptyproc.status + self.exitstatus = ptyproc.exitstatus + self.signalstatus = ptyproc.signalstatus + self.terminated = True + + return alive + + def kill(self, sig): + + '''This sends the given signal to the child application. In keeping + with UNIX tradition it has a misleading name. It does not necessarily + kill the child unless you send the right signal. ''' + + # Same as os.kill, but the pid is given for you. + if self.isalive(): + os.kill(self.pid, sig) + + def getwinsize(self): + '''This returns the terminal window size of the child tty. The return + value is a tuple of (rows, cols). ''' + return self.ptyproc.getwinsize() + + def setwinsize(self, rows, cols): + '''This sets the terminal window size of the child tty. This will cause + a SIGWINCH signal to be sent to the child. This does not change the + physical window size. It changes the size reported to TTY-aware + applications like vi or curses -- applications that respond to the + SIGWINCH signal. ''' + return self.ptyproc.setwinsize(rows, cols) + + + def interact(self, escape_character=chr(29), + input_filter=None, output_filter=None): + + '''This gives control of the child process to the interactive user (the + human at the keyboard). Keystrokes are sent to the child process, and + the stdout and stderr output of the child process is printed. This + simply echos the child stdout and child stderr to the real stdout and + it echos the real stdin to the child stdin. When the user types the + escape_character this method will return None. The escape_character + will not be transmitted. The default for escape_character is + entered as ``Ctrl - ]``, the very same as BSD telnet. To prevent + escaping, escape_character may be set to None. + + If a logfile is specified, then the data sent and received from the + child process in interact mode is duplicated to the given log. + + You may pass in optional input and output filter functions. These + functions should take bytes array and return bytes array too. 
Even + with ``encoding='utf-8'`` support, meth:`interact` will always pass + input_filter and output_filter bytes. You may need to wrap your + function to decode and encode back to UTF-8. + + The output_filter will be passed all the output from the child process. + The input_filter will be passed all the keyboard input from the user. + The input_filter is run BEFORE the check for the escape_character. + + Note that if you change the window size of the parent the SIGWINCH + signal will not be passed through to the child. If you want the child + window size to change when the parent's window size changes then do + something like the following example:: + + import pexpect, struct, fcntl, termios, signal, sys + def sigwinch_passthrough (sig, data): + s = struct.pack("HHHH", 0, 0, 0, 0) + a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(), + termios.TIOCGWINSZ , s)) + if not p.closed: + p.setwinsize(a[0],a[1]) + + # Note this 'p' is global and used in sigwinch_passthrough. + p = pexpect.spawn('/bin/bash') + signal.signal(signal.SIGWINCH, sigwinch_passthrough) + p.interact() + ''' + + # Flush the buffer. + self.write_to_stdout(self.buffer) + self.stdout.flush() + self._buffer = self.buffer_type() + mode = tty.tcgetattr(self.STDIN_FILENO) + tty.setraw(self.STDIN_FILENO) + if escape_character is not None and PY3: + escape_character = escape_character.encode('latin-1') + try: + self.__interact_copy(escape_character, input_filter, output_filter) + finally: + tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode) + + def __interact_writen(self, fd, data): + '''This is used by the interact() method. + ''' + + while data != b'' and self.isalive(): + n = os.write(fd, data) + data = data[n:] + + def __interact_read(self, fd): + '''This is used by the interact() method. + ''' + + return os.read(fd, 1000) + + def __interact_copy( + self, escape_character=None, input_filter=None, output_filter=None + ): + + '''This is used by the interact() method. + ''' + + while self.isalive(): + if self.use_poll: + r = poll_ignore_interrupts([self.child_fd, self.STDIN_FILENO]) + else: + r, w, e = select_ignore_interrupts( + [self.child_fd, self.STDIN_FILENO], [], [] + ) + if self.child_fd in r: + try: + data = self.__interact_read(self.child_fd) + except OSError as err: + if err.args[0] == errno.EIO: + # Linux-style EOF + break + raise + if data == b'': + # BSD-style EOF + break + if output_filter: + data = output_filter(data) + self._log(data, 'read') + os.write(self.STDOUT_FILENO, data) + if self.STDIN_FILENO in r: + data = self.__interact_read(self.STDIN_FILENO) + if input_filter: + data = input_filter(data) + i = -1 + if escape_character is not None: + i = data.rfind(escape_character) + if i != -1: + data = data[:i] + if data: + self._log(data, 'send') + self.__interact_writen(self.child_fd, data) + break + self._log(data, 'send') + self.__interact_writen(self.child_fd, data) + + +def spawnu(*args, **kwargs): + """Deprecated: pass encoding to spawn() instead.""" + kwargs.setdefault('encoding', 'utf-8') + return spawn(*args, **kwargs) diff --git a/myenv/lib/python3.10/site-packages/pexpect/pxssh.py b/myenv/lib/python3.10/site-packages/pexpect/pxssh.py new file mode 100644 index 000000000..3d53bd974 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/pxssh.py @@ -0,0 +1,537 @@ +'''This class extends pexpect.spawn to specialize setting up SSH connections. +This adds methods for login, logout, and expecting the shell prompt. 
+ +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +''' + +from pexpect import ExceptionPexpect, TIMEOUT, EOF, spawn +import time +import os +import sys +import re + +__all__ = ['ExceptionPxssh', 'pxssh'] + +# Exception classes used by this module. +class ExceptionPxssh(ExceptionPexpect): + '''Raised for pxssh exceptions. + ''' + +if sys.version_info > (3, 0): + from shlex import quote +else: + _find_unsafe = re.compile(r'[^\w@%+=:,./-]').search + + def quote(s): + """Return a shell-escaped version of the string *s*.""" + if not s: + return "''" + if _find_unsafe(s) is None: + return s + + # use single quotes, and put single quotes into double quotes + # the string $'b is then quoted as '$'"'"'b' + return "'" + s.replace("'", "'\"'\"'") + "'" + +class pxssh (spawn): + '''This class extends pexpect.spawn to specialize setting up SSH + connections. This adds methods for login, logout, and expecting the shell + prompt. It does various tricky things to handle many situations in the SSH + login process. For example, if the session is your first login, then pxssh + automatically accepts the remote certificate; or if you have public key + authentication setup then pxssh won't wait for the password prompt. + + pxssh uses the shell prompt to synchronize output from the remote host. In + order to make this more robust it sets the shell prompt to something more + unique than just $ or #. This should work on most Borne/Bash or Csh style + shells. + + Example that runs a few commands on a remote server and prints the result:: + + from pexpect import pxssh + import getpass + try: + s = pxssh.pxssh() + hostname = raw_input('hostname: ') + username = raw_input('username: ') + password = getpass.getpass('password: ') + s.login(hostname, username, password) + s.sendline('uptime') # run a command + s.prompt() # match the prompt + print(s.before) # print everything before the prompt. + s.sendline('ls -l') + s.prompt() + print(s.before) + s.sendline('df') + s.prompt() + print(s.before) + s.logout() + except pxssh.ExceptionPxssh as e: + print("pxssh failed on login.") + print(e) + + Example showing how to specify SSH options:: + + from pexpect import pxssh + s = pxssh.pxssh(options={ + "StrictHostKeyChecking": "no", + "UserKnownHostsFile": "/dev/null"}) + ... + + Note that if you have ssh-agent running while doing development with pxssh + then this can lead to a lot of confusion. Many X display managers (xdm, + gdm, kdm, etc.) will automatically start a GUI agent. You may see a GUI + dialog box popup asking for a password during development. You should turn + off any key agents during testing. The 'force_password' attribute will turn + off public key authentication. 
This will only work if the remote SSH server
+    is configured to allow password logins. Example of using 'force_password'
+    attribute::
+
+        s = pxssh.pxssh()
+        s.force_password = True
+        hostname = raw_input('hostname: ')
+        username = raw_input('username: ')
+        password = getpass.getpass('password: ')
+        s.login (hostname, username, password)
+
+    `debug_command_string` is only for the test suite to confirm that the string
+    generated for SSH is correct; using this will not allow you to do
+    anything other than get a string back from `pxssh.pxssh.login()`.
+    '''
+
+    def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None,
+                  logfile=None, cwd=None, env=None, ignore_sighup=True, echo=True,
+                  options={}, encoding=None, codec_errors='strict',
+                  debug_command_string=False, use_poll=False):
+
+        spawn.__init__(self, None, timeout=timeout, maxread=maxread,
+                       searchwindowsize=searchwindowsize, logfile=logfile,
+                       cwd=cwd, env=env, ignore_sighup=ignore_sighup, echo=echo,
+                       encoding=encoding, codec_errors=codec_errors, use_poll=use_poll)
+
+        self.name = '<pxssh>'
+
+        #SUBTLE HACK ALERT! Note that the command that SETS the prompt uses a
+        #slightly different string than the regular expression to match it. This
+        #is because when you set the prompt the command will echo back, but we
+        #don't want to match the echoed command. So if we make the set command
+        #slightly different than the regex we eliminate the problem. To make the
+        #set command different we add a backslash in front of $. The $ doesn't
+        #need to be escaped, but it doesn't hurt and serves to make the set
+        #prompt command different than the regex.
+
+        # used to match the command-line prompt
+        self.UNIQUE_PROMPT = r"\[PEXPECT\][\$\#] "
+        self.PROMPT = self.UNIQUE_PROMPT
+
+        # used to set shell command-line prompt to UNIQUE_PROMPT.
+        self.PROMPT_SET_SH = r"PS1='[PEXPECT]\$ '"
+        self.PROMPT_SET_CSH = r"set prompt='[PEXPECT]\$ '"
+        self.SSH_OPTS = ("-o'RSAAuthentication=no'"
+                         + " -o 'PubkeyAuthentication=no'")
+# Disabling host key checking makes you vulnerable to MITM attacks.
+#                + " -o 'StrictHostKeyChecking=no'"
+#                + " -o 'UserKnownHostsFile /dev/null' ")
+        # Disabling X11 forwarding gets rid of the annoying SSH_ASKPASS from
+        # displaying a GUI password dialog. I have not figured out how to
+        # disable only SSH_ASKPASS without also disabling X11 forwarding.
+        # Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying!
+        #self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'"
+        self.force_password = False
+
+        self.debug_command_string = debug_command_string
+
+        # User defined SSH options, e.g.,
+        # ssh.options = dict(StrictHostKeyChecking="no", UserKnownHostsFile="/dev/null")
+        self.options = options
+
+    def levenshtein_distance(self, a, b):
+        '''This calculates the Levenshtein distance between a and b.
+        '''
+
+        n, m = len(a), len(b)
+        if n > m:
+            a,b = b,a
+            n,m = m,n
+        current = range(n+1)
+        for i in range(1,m+1):
+            previous, current = current, [i]+[0]*n
+            for j in range(1,n+1):
+                add, delete = previous[j]+1, current[j-1]+1
+                change = previous[j-1]
+                if a[j-1] != b[i-1]:
+                    change = change + 1
+                current[j] = min(add, delete, change)
+        return current[n]
+
+    def try_read_prompt(self, timeout_multiplier):
+        '''This facilitates using communication timeouts to perform
+        synchronization as quickly as possible, while supporting high latency
+        connections with a tunable worst case performance. Fast connections
+        should be read almost immediately. 
Worst case performance for this + method is timeout_multiplier * 3 seconds. + ''' + + # maximum time allowed to read the first response + first_char_timeout = timeout_multiplier * 0.5 + + # maximum time allowed between subsequent characters + inter_char_timeout = timeout_multiplier * 0.1 + + # maximum time for reading the entire prompt + total_timeout = timeout_multiplier * 3.0 + + prompt = self.string_type() + begin = time.time() + expired = 0.0 + timeout = first_char_timeout + + while expired < total_timeout: + try: + prompt += self.read_nonblocking(size=1, timeout=timeout) + expired = time.time() - begin # updated total time expired + timeout = inter_char_timeout + except TIMEOUT: + break + + return prompt + + def sync_original_prompt (self, sync_multiplier=1.0): + '''This attempts to find the prompt. Basically, press enter and record + the response; press enter again and record the response; if the two + responses are similar then assume we are at the original prompt. + This can be a slow function. Worst case with the default sync_multiplier + can take 12 seconds. Low latency connections are more likely to fail + with a low sync_multiplier. Best case sync time gets worse with a + high sync multiplier (500 ms with default). ''' + + # All of these timing pace values are magic. + # I came up with these based on what seemed reliable for + # connecting to a heavily loaded machine I have. + self.sendline() + time.sleep(0.1) + + try: + # Clear the buffer before getting the prompt. + self.try_read_prompt(sync_multiplier) + except TIMEOUT: + pass + + self.sendline() + x = self.try_read_prompt(sync_multiplier) + + self.sendline() + a = self.try_read_prompt(sync_multiplier) + + self.sendline() + b = self.try_read_prompt(sync_multiplier) + + ld = self.levenshtein_distance(a,b) + len_a = len(a) + if len_a == 0: + return False + if float(ld)/len_a < 0.4: + return True + return False + + ### TODO: This is getting messy and I'm pretty sure this isn't perfect. + ### TODO: I need to draw a flow chart for this. + ### TODO: Unit tests for SSH tunnels, remote SSH command exec, disabling original prompt sync + def login (self, server, username=None, password='', terminal_type='ansi', + original_prompt=r"[#$]", login_timeout=10, port=None, + auto_prompt_reset=True, ssh_key=None, quiet=True, + sync_multiplier=1, check_local_ip=True, + password_regex=r'(?i)(?:password:)|(?:passphrase for key)', + ssh_tunnels={}, spawn_local_ssh=True, + sync_original_prompt=True, ssh_config=None, cmd='ssh'): + '''This logs the user into the given server. + + It uses 'original_prompt' to try to find the prompt right after login. + When it finds the prompt it immediately tries to reset the prompt to + something more easily matched. The default 'original_prompt' is very + optimistic and is easily fooled. It's more reliable to try to match the original + prompt as exactly as possible to prevent false matches by server + strings such as the "Message Of The Day". On many systems you can + disable the MOTD on the remote server by creating a zero-length file + called :file:`~/.hushlogin` on the remote server. If a prompt cannot be found + then this will not necessarily cause the login to fail. In the case of + a timeout when looking for the prompt we assume that the original + prompt was so weird that we could not match it, so we use a few tricks + to guess when we have reached the prompt. Then we hope for the best and + blindly try to reset the prompt to something more unique. 
If that fails + then login() raises an :class:`ExceptionPxssh` exception. + + In some situations it is not possible or desirable to reset the + original prompt. In this case, pass ``auto_prompt_reset=False`` to + inhibit setting the prompt to the UNIQUE_PROMPT. Remember that pxssh + uses a unique prompt in the :meth:`prompt` method. If the original prompt is + not reset then this will disable the :meth:`prompt` method unless you + manually set the :attr:`PROMPT` attribute. + + Set ``password_regex`` if there is a MOTD message with `password` in it. + Changing this is like playing in traffic, don't (p)expect it to match straight + away. + + If you require to connect to another SSH server from the your original SSH + connection set ``spawn_local_ssh`` to `False` and this will use your current + session to do so. Setting this option to `False` and not having an active session + will trigger an error. + + Set ``ssh_key`` to a file path to an SSH private key to use that SSH key + for the session authentication. + Set ``ssh_key`` to `True` to force passing the current SSH authentication socket + to the desired ``hostname``. + + Set ``ssh_config`` to a file path string of an SSH client config file to pass that + file to the client to handle itself. You may set any options you wish in here, however + doing so will require you to post extra information that you may not want to if you + run into issues. + + Alter the ``cmd`` to change the ssh client used, or to prepend it with network + namespaces. For example ```cmd="ip netns exec vlan2 ssh"``` to execute the ssh in + network namespace named ```vlan```. + ''' + + session_regex_array = ["(?i)are you sure you want to continue connecting", original_prompt, password_regex, "(?i)permission denied", "(?i)terminal type", TIMEOUT] + session_init_regex_array = [] + session_init_regex_array.extend(session_regex_array) + session_init_regex_array.extend(["(?i)connection closed by remote host", EOF]) + + ssh_options = ''.join([" -o '%s=%s'" % (o, v) for (o, v) in self.options.items()]) + if quiet: + ssh_options = ssh_options + ' -q' + if not check_local_ip: + ssh_options = ssh_options + " -o'NoHostAuthenticationForLocalhost=yes'" + if self.force_password: + ssh_options = ssh_options + ' ' + self.SSH_OPTS + if ssh_config is not None: + if spawn_local_ssh and not os.path.isfile(ssh_config): + raise ExceptionPxssh('SSH config does not exist or is not a file.') + ssh_options = ssh_options + ' -F ' + ssh_config + if port is not None: + ssh_options = ssh_options + ' -p %s'%(str(port)) + if ssh_key is not None: + # Allow forwarding our SSH key to the current session + if ssh_key==True: + ssh_options = ssh_options + ' -A' + else: + if spawn_local_ssh and not os.path.isfile(ssh_key): + raise ExceptionPxssh('private ssh key does not exist or is not a file.') + ssh_options = ssh_options + ' -i %s' % (ssh_key) + + # SSH tunnels, make sure you know what you're putting into the lists + # under each heading. Do not expect these to open 100% of the time, + # The port you're requesting might be bound. 
+ # + # The structure should be like this: + # { 'local': ['2424:localhost:22'], # Local SSH tunnels + # 'remote': ['2525:localhost:22'], # Remote SSH tunnels + # 'dynamic': [8888] } # Dynamic/SOCKS tunnels + if ssh_tunnels!={} and isinstance({},type(ssh_tunnels)): + tunnel_types = { + 'local':'L', + 'remote':'R', + 'dynamic':'D' + } + for tunnel_type in tunnel_types: + cmd_type = tunnel_types[tunnel_type] + if tunnel_type in ssh_tunnels: + tunnels = ssh_tunnels[tunnel_type] + for tunnel in tunnels: + if spawn_local_ssh==False: + tunnel = quote(str(tunnel)) + ssh_options = ssh_options + ' -' + cmd_type + ' ' + str(tunnel) + + if username is not None: + ssh_options = ssh_options + ' -l ' + username + elif ssh_config is None: + raise TypeError('login() needs either a username or an ssh_config') + else: # make sure ssh_config has an entry for the server with a username + with open(ssh_config, 'rt') as f: + lines = [l.strip() for l in f.readlines()] + + server_regex = r'^Host\s+%s\s*$' % server + user_regex = r'^User\s+\w+\s*$' + config_has_server = False + server_has_username = False + for line in lines: + if not config_has_server and re.match(server_regex, line, re.IGNORECASE): + config_has_server = True + elif config_has_server and 'hostname' in line.lower(): + pass + elif config_has_server and 'host' in line.lower(): + server_has_username = False # insurance + break # we have left the relevant section + elif config_has_server and re.match(user_regex, line, re.IGNORECASE): + server_has_username = True + break + + if lines: + del line + + del lines + + if not config_has_server: + raise TypeError('login() ssh_config has no Host entry for %s' % server) + elif not server_has_username: + raise TypeError('login() ssh_config has no user entry for %s' % server) + + cmd += " %s %s" % (ssh_options, server) + if self.debug_command_string: + return(cmd) + + # Are we asking for a local ssh command or to spawn one in another session? + if spawn_local_ssh: + spawn._spawn(self, cmd) + else: + self.sendline(cmd) + + # This does not distinguish between a remote server 'password' prompt + # and a local ssh 'passphrase' prompt (for unlocking a private key). + i = self.expect(session_init_regex_array, timeout=login_timeout) + + # First phase + if i==0: + # New certificate -- always accept it. + # This is what you get if SSH does not have the remote host's + # public key stored in the 'known_hosts' cache. + self.sendline("yes") + i = self.expect(session_regex_array) + if i==2: # password or passphrase + self.sendline(password) + i = self.expect(session_regex_array) + if i==4: + self.sendline(terminal_type) + i = self.expect(session_regex_array) + if i==7: + self.close() + raise ExceptionPxssh('Could not establish connection to host') + + # Second phase + if i==0: + # This is weird. This should not happen twice in a row. + self.close() + raise ExceptionPxssh('Weird error. Got "are you sure" prompt twice.') + elif i==1: # can occur if you have a public key pair set to authenticate. + ### TODO: May NOT be OK if expect() got tricked and matched a false prompt. + pass + elif i==2: # password prompt again + # For incorrect passwords, some ssh servers will + # ask for the password again, others return 'denied' right away. + # If we get the password prompt again then this means + # we didn't get the password right the first time. + self.close() + raise ExceptionPxssh('password refused') + elif i==3: # permission denied -- password was bad. 
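+            # Unlike the repeated-password case above, some servers deny a
+            # bad password immediately without re-prompting; either way the
+            # session is closed and ExceptionPxssh is raised.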
+ self.close() + raise ExceptionPxssh('permission denied') + elif i==4: # terminal type again? WTF? + self.close() + raise ExceptionPxssh('Weird error. Got "terminal type" prompt twice.') + elif i==5: # Timeout + #This is tricky... I presume that we are at the command-line prompt. + #It may be that the shell prompt was so weird that we couldn't match + #it. Or it may be that we couldn't log in for some other reason. I + #can't be sure, but it's safe to guess that we did login because if + #I presume wrong and we are not logged in then this should be caught + #later when I try to set the shell prompt. + pass + elif i==6: # Connection closed by remote host + self.close() + raise ExceptionPxssh('connection closed') + else: # Unexpected + self.close() + raise ExceptionPxssh('unexpected login response') + if sync_original_prompt: + if not self.sync_original_prompt(sync_multiplier): + self.close() + raise ExceptionPxssh('could not synchronize with original prompt') + # We appear to be in. + # set shell prompt to something unique. + if auto_prompt_reset: + if not self.set_unique_prompt(): + self.close() + raise ExceptionPxssh('could not set shell prompt ' + '(received: %r, expected: %r).' % ( + self.before, self.PROMPT,)) + return True + + def logout (self): + '''Sends exit to the remote shell. + + If there are stopped jobs then this automatically sends exit twice. + ''' + self.sendline("exit") + index = self.expect([EOF, "(?i)there are stopped jobs"]) + if index==1: + self.sendline("exit") + self.expect(EOF) + self.close() + + def prompt(self, timeout=-1): + '''Match the next shell prompt. + + This is little more than a short-cut to the :meth:`~pexpect.spawn.expect` + method. Note that if you called :meth:`login` with + ``auto_prompt_reset=False``, then before calling :meth:`prompt` you must + set the :attr:`PROMPT` attribute to a regex that it will use for + matching the prompt. + + Calling :meth:`prompt` will erase the contents of the :attr:`before` + attribute even if no prompt is ever matched. If timeout is not given or + it is set to -1 then self.timeout is used. + + :return: True if the shell prompt was matched, False if the timeout was + reached. + ''' + + if timeout == -1: + timeout = self.timeout + i = self.expect([self.PROMPT, TIMEOUT], timeout=timeout) + if i==1: + return False + return True + + def set_unique_prompt(self): + '''This sets the remote prompt to something more unique than ``#`` or ``$``. + This makes it easier for the :meth:`prompt` method to match the shell prompt + unambiguously. This method is called automatically by the :meth:`login` + method, but you may want to call it manually if you somehow reset the + shell prompt. For example, if you 'su' to a different user then you + will need to manually reset the prompt. This sends shell commands to + the remote host to set the prompt, so this assumes the remote host is + ready to receive commands. + + Alternatively, you may use your own prompt pattern. In this case you + should call :meth:`login` with ``auto_prompt_reset=False``; then set the + :attr:`PROMPT` attribute to a regular expression. After that, the + :meth:`prompt` method will try to match your prompt pattern. 
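+
+        A sketch of that manual-prompt workflow (the host, credentials and
+        prompt pattern here are hypothetical)::
+
+            s = pxssh.pxssh()
+            s.login('example.com', 'myuser', 'mypassword',
+                    auto_prompt_reset=False)
+            s.PROMPT = r'myuser@\w+:.*\$ '
+            s.prompt()             # now matches the custom pattern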
+ ''' + + self.sendline("unset PROMPT_COMMAND") + self.sendline(self.PROMPT_SET_SH) # sh-style + i = self.expect ([TIMEOUT, self.PROMPT], timeout=10) + if i == 0: # csh-style + self.sendline(self.PROMPT_SET_CSH) + i = self.expect([TIMEOUT, self.PROMPT], timeout=10) + if i == 0: + return False + return True + +# vi:ts=4:sw=4:expandtab:ft=python: diff --git a/myenv/lib/python3.10/site-packages/pexpect/replwrap.py b/myenv/lib/python3.10/site-packages/pexpect/replwrap.py new file mode 100644 index 000000000..c930f1e4f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/replwrap.py @@ -0,0 +1,130 @@ +"""Generic wrapper for read-eval-print-loops, a.k.a. interactive shells +""" +import os.path +import signal +import sys + +import pexpect + +PY3 = (sys.version_info[0] >= 3) + +if PY3: + basestring = str + +PEXPECT_PROMPT = u'[PEXPECT_PROMPT>' +PEXPECT_CONTINUATION_PROMPT = u'[PEXPECT_PROMPT+' + +class REPLWrapper(object): + """Wrapper for a REPL. + + :param cmd_or_spawn: This can either be an instance of :class:`pexpect.spawn` + in which a REPL has already been started, or a str command to start a new + REPL process. + :param str orig_prompt: The prompt to expect at first. + :param str prompt_change: A command to change the prompt to something more + unique. If this is ``None``, the prompt will not be changed. This will + be formatted with the new and continuation prompts as positional + parameters, so you can use ``{}`` style formatting to insert them into + the command. + :param str new_prompt: The more unique prompt to expect after the change. + :param str extra_init_cmd: Commands to do extra initialisation, such as + disabling pagers. + """ + def __init__(self, cmd_or_spawn, orig_prompt, prompt_change, + new_prompt=PEXPECT_PROMPT, + continuation_prompt=PEXPECT_CONTINUATION_PROMPT, + extra_init_cmd=None): + if isinstance(cmd_or_spawn, basestring): + self.child = pexpect.spawn(cmd_or_spawn, echo=False, encoding='utf-8') + else: + self.child = cmd_or_spawn + if self.child.echo: + # Existing spawn instance has echo enabled, disable it + # to prevent our input from being repeated to output. + self.child.setecho(False) + self.child.waitnoecho() + + if prompt_change is None: + self.prompt = orig_prompt + else: + self.set_prompt(orig_prompt, + prompt_change.format(new_prompt, continuation_prompt)) + self.prompt = new_prompt + self.continuation_prompt = continuation_prompt + + self._expect_prompt() + + if extra_init_cmd is not None: + self.run_command(extra_init_cmd) + + def set_prompt(self, orig_prompt, prompt_change): + self.child.expect(orig_prompt) + self.child.sendline(prompt_change) + + def _expect_prompt(self, timeout=-1, async_=False): + return self.child.expect_exact([self.prompt, self.continuation_prompt], + timeout=timeout, async_=async_) + + def run_command(self, command, timeout=-1, async_=False): + """Send a command to the REPL, wait for and return output. + + :param str command: The command to send. Trailing newlines are not needed. + This should be a complete block of input that will trigger execution; + if a continuation prompt is found after sending input, :exc:`ValueError` + will be raised. + :param int timeout: How long to wait for the next prompt. -1 means the + default from the :class:`pexpect.spawn` object (default 30 seconds). + None means to wait indefinitely. 
+ :param bool async_: On Python 3.4, or Python 3.3 with asyncio + installed, passing ``async_=True`` will make this return an + :mod:`asyncio` Future, which you can yield from to get the same + result that this method would normally give directly. + """ + # Split up multiline commands and feed them in bit-by-bit + cmdlines = command.splitlines() + # splitlines ignores trailing newlines - add it back in manually + if command.endswith('\n'): + cmdlines.append('') + if not cmdlines: + raise ValueError("No command was given") + + if async_: + from ._async import repl_run_command_async + return repl_run_command_async(self, cmdlines, timeout) + + res = [] + self.child.sendline(cmdlines[0]) + for line in cmdlines[1:]: + self._expect_prompt(timeout=timeout) + res.append(self.child.before) + self.child.sendline(line) + + # Command was fully submitted, now wait for the next prompt + if self._expect_prompt(timeout=timeout) == 1: + # We got the continuation prompt - command was incomplete + self.child.kill(signal.SIGINT) + self._expect_prompt(timeout=1) + raise ValueError("Continuation prompt found - input was incomplete:\n" + + command) + return u''.join(res + [self.child.before]) + +def python(command="python"): + """Start a Python shell and return a :class:`REPLWrapper` object.""" + return REPLWrapper(command, u">>> ", u"import sys; sys.ps1={0!r}; sys.ps2={1!r}") + +def bash(command="bash"): + """Start a bash shell and return a :class:`REPLWrapper` object.""" + bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh') + child = pexpect.spawn(command, ['--rcfile', bashrc], echo=False, + encoding='utf-8') + + # If the user runs 'env', the value of PS1 will be in the output. To avoid + # replwrap seeing that as the next prompt, we'll embed the marker characters + # for invisible characters in the prompt; these show up when inspecting the + # environment variable, but not when bash displays the prompt. + ps1 = PEXPECT_PROMPT[:5] + u'\\[\\]' + PEXPECT_PROMPT[5:] + ps2 = PEXPECT_CONTINUATION_PROMPT[:5] + u'\\[\\]' + PEXPECT_CONTINUATION_PROMPT[5:] + prompt_change = u"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''".format(ps1, ps2) + + return REPLWrapper(child, u'\\$', prompt_change, + extra_init_cmd="export PAGER=cat") diff --git a/myenv/lib/python3.10/site-packages/pexpect/run.py b/myenv/lib/python3.10/site-packages/pexpect/run.py new file mode 100644 index 000000000..ff288a124 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/run.py @@ -0,0 +1,157 @@ +import sys +import types + +from .exceptions import EOF, TIMEOUT +from .pty_spawn import spawn + +def run(command, timeout=30, withexitstatus=False, events=None, + extra_args=None, logfile=None, cwd=None, env=None, **kwargs): + + ''' + This function runs the given command; waits for it to finish; then + returns all output as a string. STDERR is included in output. If the full + path to the command is not given then the path is searched. + + Note that lines are terminated by CR/LF (\\r\\n) combination even on + UNIX-like systems because this is the standard for pseudottys. If you set + 'withexitstatus' to true, then run will return a tuple of (command_output, + exitstatus). If 'withexitstatus' is false then this returns just + command_output. + + The run() function can often be used instead of creating a spawn instance. 
+    For example, the following code uses spawn::
+
+        from pexpect import *
+        child = spawn('scp foo user@example.com:.')
+        child.expect('(?i)password')
+        child.sendline(mypassword)
+
+    The previous code can be replaced with the following::
+
+        from pexpect import *
+        run('scp foo user@example.com:.', events={'(?i)password': mypassword})
+
+    **Examples**
+
+    Start the apache daemon on the local machine::
+
+        from pexpect import *
+        run("/usr/local/apache/bin/apachectl start")
+
+    Check in a file using SVN::
+
+        from pexpect import *
+        run("svn ci -m 'automatic commit' my_file.py")
+
+    Run a command and capture exit status::
+
+        from pexpect import *
+        (command_output, exitstatus) = run('ls -l /bin', withexitstatus=1)
+
+    The following will run SSH and execute 'ls -l' on the remote machine. The
+    password 'secret' will be sent if the '(?i)password' pattern is ever seen::
+
+        run("ssh username@machine.example.com 'ls -l'",
+            events={'(?i)password':'secret\\n'})
+
+    This will start mencoder to rip a video from DVD. This will also display
+    progress ticks every 5 seconds as it runs. For example::
+
+        from pexpect import *
+        def print_ticks(d):
+            print(d['event_count'], end=' ')
+        run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
+            events={TIMEOUT:print_ticks}, timeout=5)
+
+    The 'events' argument should be either a dictionary or a tuple list that
+    contains patterns and responses. Whenever one of the patterns is seen
+    in the command output, run() will send the associated response string.
+    So, run() in the above example can also be written as::
+
+        run("mencoder dvd://1 -o video.avi -oac copy -ovc copy",
+            events=[(TIMEOUT,print_ticks)], timeout=5)
+
+    Use a tuple list for events if the command output requires delicate
+    control over which pattern should be matched, since the tuple list is
+    passed to expect() as its pattern list, with the order of patterns
+    preserved.
+
+    Note that you should put newlines in your string if Enter is necessary.
+
+    Like the example above, the responses may also contain a callback, either
+    a function or method. It should accept a dictionary value as an argument.
+    The dictionary contains all the locals from the run() function, so you can
+    access the child spawn object or any other variable defined in run()
+    (event_count, child, and extra_args are the most useful). A callback may
+    return True to stop the current run process. Otherwise run() continues
+    until the next event. A callback may also return a string which will be
+    sent to the child. 'extra_args' is not used directly by run(); it provides
+    a way to pass data to a callback function through the locals dictionary
+    that run() passes to the callback.
+
+    Like :class:`spawn`, passing *encoding* will make it work with unicode
+    instead of bytes. You can pass *codec_errors* to control how errors in
+    encoding and decoding are handled.
+    '''
+    if timeout == -1:
+        child = spawn(command, maxread=2000, logfile=logfile, cwd=cwd, env=env,
+                      **kwargs)
+    else:
+        child = spawn(command, timeout=timeout, maxread=2000, logfile=logfile,
+                      cwd=cwd, env=env, **kwargs)
+    if isinstance(events, list):
+        patterns = [x for x, y in events]
+        responses = [y for x, y in events]
+    elif isinstance(events, dict):
+        patterns = list(events.keys())
+        responses = list(events.values())
+    else:
+        # This assumes EOF or TIMEOUT will eventually cause run to terminate.
+ patterns = None + responses = None + child_result_list = [] + event_count = 0 + while True: + try: + index = child.expect(patterns) + if isinstance(child.after, child.allowed_string_types): + child_result_list.append(child.before + child.after) + else: + # child.after may have been a TIMEOUT or EOF, + # which we don't want appended to the list. + child_result_list.append(child.before) + if isinstance(responses[index], child.allowed_string_types): + child.send(responses[index]) + elif (isinstance(responses[index], types.FunctionType) or + isinstance(responses[index], types.MethodType)): + callback_result = responses[index](locals()) + sys.stdout.flush() + if isinstance(callback_result, child.allowed_string_types): + child.send(callback_result) + elif callback_result: + break + else: + raise TypeError("parameter `event' at index {index} must be " + "a string, method, or function: {value!r}" + .format(index=index, value=responses[index])) + event_count = event_count + 1 + except TIMEOUT: + child_result_list.append(child.before) + break + except EOF: + child_result_list.append(child.before) + break + child_result = child.string_type().join(child_result_list) + if withexitstatus: + child.close() + return (child_result, child.exitstatus) + else: + return child_result + +def runu(command, timeout=30, withexitstatus=False, events=None, + extra_args=None, logfile=None, cwd=None, env=None, **kwargs): + """Deprecated: pass encoding to run() instead. + """ + kwargs.setdefault('encoding', 'utf-8') + return run(command, timeout=timeout, withexitstatus=withexitstatus, + events=events, extra_args=extra_args, logfile=logfile, cwd=cwd, + env=env, **kwargs) diff --git a/myenv/lib/python3.10/site-packages/pexpect/screen.py b/myenv/lib/python3.10/site-packages/pexpect/screen.py new file mode 100644 index 000000000..79f95c4e5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/screen.py @@ -0,0 +1,431 @@ +'''This implements a virtual screen. This is used to support ANSI terminal +emulation. The screen representation and state is implemented in this class. +Most of the methods are inspired by ANSI screen control codes. The +:class:`~pexpect.ANSI.ANSI` class extends this class to add parsing of ANSI +escape codes. + +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +''' + +import codecs +import copy +import sys + +import warnings + +warnings.warn(("pexpect.screen and pexpect.ANSI are deprecated. " + "We recommend using pyte to emulate a terminal screen: " + "https://pypi.python.org/pypi/pyte"), + stacklevel=2) + +NUL = 0 # Fill character; ignored on input. +ENQ = 5 # Transmit answerback message. +BEL = 7 # Ring the bell. +BS = 8 # Move cursor left. 
+HT = 9 # Move cursor to next tab stop. +LF = 10 # Line feed. +VT = 11 # Same as LF. +FF = 12 # Same as LF. +CR = 13 # Move cursor to left margin or newline. +SO = 14 # Invoke G1 character set. +SI = 15 # Invoke G0 character set. +XON = 17 # Resume transmission. +XOFF = 19 # Halt transmission. +CAN = 24 # Cancel escape sequence. +SUB = 26 # Same as CAN. +ESC = 27 # Introduce a control sequence. +DEL = 127 # Fill character; ignored on input. +SPACE = u' ' # Space or blank character. + +PY3 = (sys.version_info[0] >= 3) +if PY3: + unicode = str + +def constrain (n, min, max): + + '''This returns a number, n constrained to the min and max bounds. ''' + + if n < min: + return min + if n > max: + return max + return n + +class screen: + '''This object maintains the state of a virtual text screen as a + rectangular array. This maintains a virtual cursor position and handles + scrolling as characters are added. This supports most of the methods needed + by an ANSI text screen. Row and column indexes are 1-based (not zero-based, + like arrays). + + Characters are represented internally using unicode. Methods that accept + input characters, when passed 'bytes' (which in Python 2 is equivalent to + 'str'), convert them from the encoding specified in the 'encoding' + parameter to the constructor. Methods that return screen contents return + unicode strings, with the exception of __str__() under Python 2. Passing + ``encoding=None`` limits the API to only accept unicode input, so passing + bytes in will raise :exc:`TypeError`. + ''' + def __init__(self, r=24, c=80, encoding='latin-1', encoding_errors='replace'): + '''This initializes a blank screen of the given dimensions.''' + + self.rows = r + self.cols = c + self.encoding = encoding + self.encoding_errors = encoding_errors + if encoding is not None: + self.decoder = codecs.getincrementaldecoder(encoding)(encoding_errors) + else: + self.decoder = None + self.cur_r = 1 + self.cur_c = 1 + self.cur_saved_r = 1 + self.cur_saved_c = 1 + self.scroll_row_start = 1 + self.scroll_row_end = self.rows + self.w = [ [SPACE] * self.cols for _ in range(self.rows)] + + def _decode(self, s): + '''This converts from the external coding system (as passed to + the constructor) to the internal one (unicode). ''' + if self.decoder is not None: + return self.decoder.decode(s) + else: + raise TypeError("This screen was constructed with encoding=None, " + "so it does not handle bytes.") + + def _unicode(self): + '''This returns a printable representation of the screen as a unicode + string (which, under Python 3.x, is the same as 'str'). The end of each + screen line is terminated by a newline.''' + + return u'\n'.join ([ u''.join(c) for c in self.w ]) + + if PY3: + __str__ = _unicode + else: + __unicode__ = _unicode + + def __str__(self): + '''This returns a printable representation of the screen. The end of + each screen line is terminated by a newline. ''' + encoding = self.encoding or 'ascii' + return self._unicode().encode(encoding, 'replace') + + def dump (self): + '''This returns a copy of the screen as a unicode string. This is similar to + __str__/__unicode__ except that lines are not terminated with line + feeds.''' + + return u''.join ([ u''.join(c) for c in self.w ]) + + def pretty (self): + '''This returns a copy of the screen as a unicode string with an ASCII + text box around the screen border. 
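+        For a blank 2-row, 4-column screen, for example, the result is::
+
+            +----+
+            |    |
+            |    |
+            +----+
+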
This is similar to + __str__/__unicode__ except that it adds a box.''' + + top_bot = u'+' + u'-'*self.cols + u'+\n' + return top_bot + u'\n'.join([u'|'+line+u'|' for line in unicode(self).split(u'\n')]) + u'\n' + top_bot + + def fill (self, ch=SPACE): + + if isinstance(ch, bytes): + ch = self._decode(ch) + + self.fill_region (1,1,self.rows,self.cols, ch) + + def fill_region (self, rs,cs, re,ce, ch=SPACE): + + if isinstance(ch, bytes): + ch = self._decode(ch) + + rs = constrain (rs, 1, self.rows) + re = constrain (re, 1, self.rows) + cs = constrain (cs, 1, self.cols) + ce = constrain (ce, 1, self.cols) + if rs > re: + rs, re = re, rs + if cs > ce: + cs, ce = ce, cs + for r in range (rs, re+1): + for c in range (cs, ce + 1): + self.put_abs (r,c,ch) + + def cr (self): + '''This moves the cursor to the beginning (col 1) of the current row. + ''' + + self.cursor_home (self.cur_r, 1) + + def lf (self): + '''This moves the cursor down with scrolling. + ''' + + old_r = self.cur_r + self.cursor_down() + if old_r == self.cur_r: + self.scroll_up () + self.erase_line() + + def crlf (self): + '''This advances the cursor with CRLF properties. + The cursor will line wrap and the screen may scroll. + ''' + + self.cr () + self.lf () + + def newline (self): + '''This is an alias for crlf(). + ''' + + self.crlf() + + def put_abs (self, r, c, ch): + '''Screen array starts at 1 index.''' + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + if isinstance(ch, bytes): + ch = self._decode(ch)[0] + else: + ch = ch[0] + self.w[r-1][c-1] = ch + + def put (self, ch): + '''This puts a characters at the current cursor position. + ''' + + if isinstance(ch, bytes): + ch = self._decode(ch) + + self.put_abs (self.cur_r, self.cur_c, ch) + + def insert_abs (self, r, c, ch): + '''This inserts a character at (r,c). Everything under + and to the right is shifted right one character. + The last character of the line is lost. + ''' + + if isinstance(ch, bytes): + ch = self._decode(ch) + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + for ci in range (self.cols, c, -1): + self.put_abs (r,ci, self.get_abs(r,ci-1)) + self.put_abs (r,c,ch) + + def insert (self, ch): + + if isinstance(ch, bytes): + ch = self._decode(ch) + + self.insert_abs (self.cur_r, self.cur_c, ch) + + def get_abs (self, r, c): + + r = constrain (r, 1, self.rows) + c = constrain (c, 1, self.cols) + return self.w[r-1][c-1] + + def get (self): + + self.get_abs (self.cur_r, self.cur_c) + + def get_region (self, rs,cs, re,ce): + '''This returns a list of lines representing the region. + ''' + + rs = constrain (rs, 1, self.rows) + re = constrain (re, 1, self.rows) + cs = constrain (cs, 1, self.cols) + ce = constrain (ce, 1, self.cols) + if rs > re: + rs, re = re, rs + if cs > ce: + cs, ce = ce, cs + sc = [] + for r in range (rs, re+1): + line = u'' + for c in range (cs, ce + 1): + ch = self.get_abs (r,c) + line = line + ch + sc.append (line) + return sc + + def cursor_constrain (self): + '''This keeps the cursor within the screen area. 
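+        For example (hypothetical out-of-range values)::
+
+            s = screen(24, 80)
+            s.cur_r, s.cur_c = 30, -5
+            s.cursor_constrain()
+            assert (s.cur_r, s.cur_c) == (24, 1)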
+ ''' + + self.cur_r = constrain (self.cur_r, 1, self.rows) + self.cur_c = constrain (self.cur_c, 1, self.cols) + + def cursor_home (self, r=1, c=1): # [{ROW};{COLUMN}H + + self.cur_r = r + self.cur_c = c + self.cursor_constrain () + + def cursor_back (self,count=1): # [{COUNT}D (not confused with down) + + self.cur_c = self.cur_c - count + self.cursor_constrain () + + def cursor_down (self,count=1): # [{COUNT}B (not confused with back) + + self.cur_r = self.cur_r + count + self.cursor_constrain () + + def cursor_forward (self,count=1): # [{COUNT}C + + self.cur_c = self.cur_c + count + self.cursor_constrain () + + def cursor_up (self,count=1): # [{COUNT}A + + self.cur_r = self.cur_r - count + self.cursor_constrain () + + def cursor_up_reverse (self): # M (called RI -- Reverse Index) + + old_r = self.cur_r + self.cursor_up() + if old_r == self.cur_r: + self.scroll_up() + + def cursor_force_position (self, r, c): # [{ROW};{COLUMN}f + '''Identical to Cursor Home.''' + + self.cursor_home (r, c) + + def cursor_save (self): # [s + '''Save current cursor position.''' + + self.cursor_save_attrs() + + def cursor_unsave (self): # [u + '''Restores cursor position after a Save Cursor.''' + + self.cursor_restore_attrs() + + def cursor_save_attrs (self): # 7 + '''Save current cursor position.''' + + self.cur_saved_r = self.cur_r + self.cur_saved_c = self.cur_c + + def cursor_restore_attrs (self): # 8 + '''Restores cursor position after a Save Cursor.''' + + self.cursor_home (self.cur_saved_r, self.cur_saved_c) + + def scroll_constrain (self): + '''This keeps the scroll region within the screen region.''' + + if self.scroll_row_start <= 0: + self.scroll_row_start = 1 + if self.scroll_row_end > self.rows: + self.scroll_row_end = self.rows + + def scroll_screen (self): # [r + '''Enable scrolling for entire display.''' + + self.scroll_row_start = 1 + self.scroll_row_end = self.rows + + def scroll_screen_rows (self, rs, re): # [{start};{end}r + '''Enable scrolling from row {start} to row {end}.''' + + self.scroll_row_start = rs + self.scroll_row_end = re + self.scroll_constrain() + + def scroll_down (self): # D + '''Scroll display down one line.''' + + # Screen is indexed from 1, but arrays are indexed from 0. + s = self.scroll_row_start - 1 + e = self.scroll_row_end - 1 + self.w[s+1:e+1] = copy.deepcopy(self.w[s:e]) + + def scroll_up (self): # M + '''Scroll display up one line.''' + + # Screen is indexed from 1, but arrays are indexed from 0. 
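+        # For a hypothetical full-screen region (scroll_row_start=1,
+        # scroll_row_end=rows): s=0, e=rows-1, and the slice assignment below
+        # copies w[1:rows] onto w[0:rows-1], shifting every line up by one.
+        # The bottom line keeps its old contents until the caller erases it
+        # (see lf(), which calls scroll_up() and then erase_line()).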
+ s = self.scroll_row_start - 1 + e = self.scroll_row_end - 1 + self.w[s:e] = copy.deepcopy(self.w[s+1:e+1]) + + def erase_end_of_line (self): # [0K -or- [K + '''Erases from the current cursor position to the end of the current + line.''' + + self.fill_region (self.cur_r, self.cur_c, self.cur_r, self.cols) + + def erase_start_of_line (self): # [1K + '''Erases from the current cursor position to the start of the current + line.''' + + self.fill_region (self.cur_r, 1, self.cur_r, self.cur_c) + + def erase_line (self): # [2K + '''Erases the entire current line.''' + + self.fill_region (self.cur_r, 1, self.cur_r, self.cols) + + def erase_down (self): # [0J -or- [J + '''Erases the screen from the current line down to the bottom of the + screen.''' + + self.erase_end_of_line () + self.fill_region (self.cur_r + 1, 1, self.rows, self.cols) + + def erase_up (self): # [1J + '''Erases the screen from the current line up to the top of the + screen.''' + + self.erase_start_of_line () + self.fill_region (self.cur_r-1, 1, 1, self.cols) + + def erase_screen (self): # [2J + '''Erases the screen with the background color.''' + + self.fill () + + def set_tab (self): # H + '''Sets a tab at the current position.''' + + pass + + def clear_tab (self): # [g + '''Clears tab at the current position.''' + + pass + + def clear_all_tabs (self): # [3g + '''Clears all tabs.''' + + pass + +# Insert line Esc [ Pn L +# Delete line Esc [ Pn M +# Delete character Esc [ Pn P +# Scrolling region Esc [ Pn(top);Pn(bot) r + diff --git a/myenv/lib/python3.10/site-packages/pexpect/spawnbase.py b/myenv/lib/python3.10/site-packages/pexpect/spawnbase.py new file mode 100644 index 000000000..59e905764 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/spawnbase.py @@ -0,0 +1,525 @@ +from io import StringIO, BytesIO +import codecs +import os +import sys +import re +import errno +from .exceptions import ExceptionPexpect, EOF, TIMEOUT +from .expect import Expecter, searcher_string, searcher_re + +PY3 = (sys.version_info[0] >= 3) +text_type = str if PY3 else unicode + +class _NullCoder(object): + """Pass bytes through unchanged.""" + @staticmethod + def encode(b, final=False): + return b + + @staticmethod + def decode(b, final=False): + return b + +class SpawnBase(object): + """A base class providing the backwards-compatible spawn API for Pexpect. + + This should not be instantiated directly: use :class:`pexpect.spawn` or + :class:`pexpect.fdpexpect.fdspawn`. + """ + encoding = None + pid = None + flag_eof = False + + def __init__(self, timeout=30, maxread=2000, searchwindowsize=None, + logfile=None, encoding=None, codec_errors='strict'): + self.stdin = sys.stdin + self.stdout = sys.stdout + self.stderr = sys.stderr + + self.searcher = None + self.ignorecase = False + self.before = None + self.after = None + self.match = None + self.match_index = None + self.terminated = True + self.exitstatus = None + self.signalstatus = None + # status returned by os.waitpid + self.status = None + # the child file descriptor is initially closed + self.child_fd = -1 + self.timeout = timeout + self.delimiter = EOF + self.logfile = logfile + # input from child (read_nonblocking) + self.logfile_read = None + # output to send (send, sendline) + self.logfile_send = None + # max bytes to read at one time into buffer + self.maxread = maxread + # Data before searchwindowsize point is preserved, but not searched. + self.searchwindowsize = searchwindowsize + # Delay used before sending data to child. Time in seconds. 
+ # Set this to None to skip the time.sleep() call completely. + self.delaybeforesend = 0.05 + # Used by close() to give kernel time to update process status. + # Time in seconds. + self.delayafterclose = 0.1 + # Used by terminate() to give kernel time to update process status. + # Time in seconds. + self.delayafterterminate = 0.1 + # Delay in seconds to sleep after each call to read_nonblocking(). + # Set this to None to skip the time.sleep() call completely: that + # would restore the behavior from pexpect-2.0 (for performance + # reasons or because you don't want to release Python's global + # interpreter lock). + self.delayafterread = 0.0001 + self.softspace = False + self.name = '<' + repr(self) + '>' + self.closed = True + + # Unicode interface + self.encoding = encoding + self.codec_errors = codec_errors + if encoding is None: + # bytes mode (accepts some unicode for backwards compatibility) + self._encoder = self._decoder = _NullCoder() + self.string_type = bytes + self.buffer_type = BytesIO + self.crlf = b'\r\n' + if PY3: + self.allowed_string_types = (bytes, str) + self.linesep = os.linesep.encode('ascii') + def write_to_stdout(b): + try: + return sys.stdout.buffer.write(b) + except AttributeError: + # If stdout has been replaced, it may not have .buffer + return sys.stdout.write(b.decode('ascii', 'replace')) + self.write_to_stdout = write_to_stdout + else: + self.allowed_string_types = (basestring,) # analysis:ignore + self.linesep = os.linesep + self.write_to_stdout = sys.stdout.write + else: + # unicode mode + self._encoder = codecs.getincrementalencoder(encoding)(codec_errors) + self._decoder = codecs.getincrementaldecoder(encoding)(codec_errors) + self.string_type = text_type + self.buffer_type = StringIO + self.crlf = u'\r\n' + self.allowed_string_types = (text_type, ) + if PY3: + self.linesep = os.linesep + else: + self.linesep = os.linesep.decode('ascii') + # This can handle unicode in both Python 2 and 3 + self.write_to_stdout = sys.stdout.write + # storage for async transport + self.async_pw_transport = None + # This is the read buffer. See maxread. + self._buffer = self.buffer_type() + # The buffer may be trimmed for efficiency reasons. This is the + # untrimmed buffer, used to create the before attribute. + self._before = self.buffer_type() + + def _log(self, s, direction): + if self.logfile is not None: + self.logfile.write(s) + self.logfile.flush() + second_log = self.logfile_send if (direction=='send') else self.logfile_read + if second_log is not None: + second_log.write(s) + second_log.flush() + + # For backwards compatibility, in bytes mode (when encoding is None) + # unicode is accepted for send and expect. Unicode mode is strictly unicode + # only. + def _coerce_expect_string(self, s): + if self.encoding is None and not isinstance(s, bytes): + return s.encode('ascii') + return s + + def _coerce_send_string(self, s): + if self.encoding is None and not isinstance(s, bytes): + return s.encode('utf-8') + return s + + def _get_buffer(self): + return self._buffer.getvalue() + + def _set_buffer(self, value): + self._buffer = self.buffer_type() + self._buffer.write(value) + + # This property is provided for backwards compatability (self.buffer used + # to be a string/bytes object) + buffer = property(_get_buffer, _set_buffer) + + def read_nonblocking(self, size=1, timeout=None): + """This reads data from the file descriptor. + + This is a simple implementation suitable for a regular file. Subclasses using ptys or pipes should override it. 
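+
+        For instance, a minimal file-backed subclass might look like this
+        (a sketch; the class name and open flags are illustrative only)::
+
+            import os
+            from pexpect.spawnbase import SpawnBase
+
+            class FileSpawn(SpawnBase):
+                def __init__(self, path, **kwargs):
+                    SpawnBase.__init__(self, **kwargs)
+                    self.child_fd = os.open(path, os.O_RDONLY)
+                    self.closed = False
+
+                def close(self):
+                    os.close(self.child_fd)
+                    self.child_fd = -1
+                    self.closed = True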
+ + The timeout parameter is ignored. + """ + + try: + s = os.read(self.child_fd, size) + except OSError as err: + if err.args[0] == errno.EIO: + # Linux-style EOF + self.flag_eof = True + raise EOF('End Of File (EOF). Exception style platform.') + raise + if s == b'': + # BSD-style EOF + self.flag_eof = True + raise EOF('End Of File (EOF). Empty string style platform.') + + s = self._decoder.decode(s, final=False) + self._log(s, 'read') + return s + + def _pattern_type_err(self, pattern): + raise TypeError('got {badtype} ({badobj!r}) as pattern, must be one' + ' of: {goodtypes}, pexpect.EOF, pexpect.TIMEOUT'\ + .format(badtype=type(pattern), + badobj=pattern, + goodtypes=', '.join([str(ast)\ + for ast in self.allowed_string_types]) + ) + ) + + def compile_pattern_list(self, patterns): + '''This compiles a pattern-string or a list of pattern-strings. + Patterns must be a StringType, EOF, TIMEOUT, SRE_Pattern, or a list of + those. Patterns may also be None which results in an empty list (you + might do this if waiting for an EOF or TIMEOUT condition without + expecting any pattern). + + This is used by expect() when calling expect_list(). Thus expect() is + nothing more than:: + + cpl = self.compile_pattern_list(pl) + return self.expect_list(cpl, timeout) + + If you are using expect() within a loop it may be more + efficient to compile the patterns first and then call expect_list(). + This avoid calls in a loop to compile_pattern_list():: + + cpl = self.compile_pattern_list(my_pattern) + while some_condition: + ... + i = self.expect_list(cpl, timeout) + ... + ''' + + if patterns is None: + return [] + if not isinstance(patterns, list): + patterns = [patterns] + + # Allow dot to match \n + compile_flags = re.DOTALL + if self.ignorecase: + compile_flags = compile_flags | re.IGNORECASE + compiled_pattern_list = [] + for idx, p in enumerate(patterns): + if isinstance(p, self.allowed_string_types): + p = self._coerce_expect_string(p) + compiled_pattern_list.append(re.compile(p, compile_flags)) + elif p is EOF: + compiled_pattern_list.append(EOF) + elif p is TIMEOUT: + compiled_pattern_list.append(TIMEOUT) + elif isinstance(p, type(re.compile(''))): + compiled_pattern_list.append(p) + else: + self._pattern_type_err(p) + return compiled_pattern_list + + def expect(self, pattern, timeout=-1, searchwindowsize=-1, async_=False, **kw): + '''This seeks through the stream until a pattern is matched. The + pattern is overloaded and may take several types. The pattern can be a + StringType, EOF, a compiled re, or a list of any of those types. + Strings will be compiled to re types. This returns the index into the + pattern list. If the pattern was not a list this returns index 0 on a + successful match. This may raise exceptions for EOF or TIMEOUT. To + avoid the EOF or TIMEOUT exceptions add EOF or TIMEOUT to the pattern + list. That will cause expect to match an EOF or TIMEOUT condition + instead of raising an exception. + + If you pass a list of patterns and more than one matches, the first + match in the stream is chosen. If more than one pattern matches at that + point, the leftmost in the pattern list is chosen. For example:: + + # the input is 'foobar' + index = p.expect(['bar', 'foo', 'foobar']) + # returns 1('foo') even though 'foobar' is a "better" match + + Please note, however, that buffering can affect this behavior, since + input arrives in unpredictable chunks. 
For example:: + + # the input is 'foobar' + index = p.expect(['foobar', 'foo']) + # returns 0('foobar') if all input is available at once, + # but returns 1('foo') if parts of the final 'bar' arrive late + + When a match is found for the given pattern, the class instance + attribute *match* becomes an re.MatchObject result. Should an EOF + or TIMEOUT pattern match, then the match attribute will be an instance + of that exception class. The pairing before and after class + instance attributes are views of the data preceding and following + the matching pattern. On general exception, class attribute + *before* is all data received up to the exception, while *match* and + *after* attributes are value None. + + When the keyword argument timeout is -1 (default), then TIMEOUT will + raise after the default value specified by the class timeout + attribute. When None, TIMEOUT will not be raised and may block + indefinitely until match. + + When the keyword argument searchwindowsize is -1 (default), then the + value specified by the class maxread attribute is used. + + A list entry may be EOF or TIMEOUT instead of a string. This will + catch these exceptions and return the index of the list entry instead + of raising the exception. The attribute 'after' will be set to the + exception type. The attribute 'match' will be None. This allows you to + write code like this:: + + index = p.expect(['good', 'bad', pexpect.EOF, pexpect.TIMEOUT]) + if index == 0: + do_something() + elif index == 1: + do_something_else() + elif index == 2: + do_some_other_thing() + elif index == 3: + do_something_completely_different() + + instead of code like this:: + + try: + index = p.expect(['good', 'bad']) + if index == 0: + do_something() + elif index == 1: + do_something_else() + except EOF: + do_some_other_thing() + except TIMEOUT: + do_something_completely_different() + + These two forms are equivalent. It all depends on what you want. You + can also just expect the EOF if you are waiting for all output of a + child to finish. For example:: + + p = pexpect.spawn('/bin/ls') + p.expect(pexpect.EOF) + print p.before + + If you are trying to optimize for speed then see expect_list(). + + On Python 3.4, or Python 3.3 with asyncio installed, passing + ``async_=True`` will make this return an :mod:`asyncio` coroutine, + which you can yield from to get the same result that this method would + normally give directly. So, inside a coroutine, you can replace this code:: + + index = p.expect(patterns) + + With this non-blocking form:: + + index = yield from p.expect(patterns, async_=True) + ''' + if 'async' in kw: + async_ = kw.pop('async') + if kw: + raise TypeError("Unknown keyword arguments: {}".format(kw)) + + compiled_pattern_list = self.compile_pattern_list(pattern) + return self.expect_list(compiled_pattern_list, + timeout, searchwindowsize, async_) + + def expect_list(self, pattern_list, timeout=-1, searchwindowsize=-1, + async_=False, **kw): + '''This takes a list of compiled regular expressions and returns the + index into the pattern_list that matched the child output. The list may + also contain EOF or TIMEOUT(which are not compiled regular + expressions). This method is similar to the expect() method except that + expect_list() does not recompile the pattern list on every call. This + may help if you are trying to optimize for speed, otherwise just use + the expect() method. This is called by expect(). + + + Like :meth:`expect`, passing ``async_=True`` will make this return an + asyncio coroutine. 
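+
+        For example, compiling the patterns once and matching in a loop
+        (a sketch; the patterns and the spawned child are illustrative)::
+
+            cpl = child.compile_pattern_list([pexpect.EOF, '(?i)ready'])
+            while True:
+                i = child.expect_list(cpl, timeout=30)
+                if i == 0:      # EOF
+                    break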
+ ''' + if timeout == -1: + timeout = self.timeout + if 'async' in kw: + async_ = kw.pop('async') + if kw: + raise TypeError("Unknown keyword arguments: {}".format(kw)) + + exp = Expecter(self, searcher_re(pattern_list), searchwindowsize) + if async_: + from ._async import expect_async + return expect_async(exp, timeout) + else: + return exp.expect_loop(timeout) + + def expect_exact(self, pattern_list, timeout=-1, searchwindowsize=-1, + async_=False, **kw): + + '''This is similar to expect(), but uses plain string matching instead + of compiled regular expressions in 'pattern_list'. The 'pattern_list' + may be a string; a list or other sequence of strings; or TIMEOUT and + EOF. + + This call might be faster than expect() for two reasons: string + searching is faster than RE matching and it is possible to limit the + search to just the end of the input buffer. + + This method is also useful when you don't want to have to worry about + escaping regular expression characters that you want to match. + + Like :meth:`expect`, passing ``async_=True`` will make this return an + asyncio coroutine. + ''' + if timeout == -1: + timeout = self.timeout + if 'async' in kw: + async_ = kw.pop('async') + if kw: + raise TypeError("Unknown keyword arguments: {}".format(kw)) + + if (isinstance(pattern_list, self.allowed_string_types) or + pattern_list in (TIMEOUT, EOF)): + pattern_list = [pattern_list] + + def prepare_pattern(pattern): + if pattern in (TIMEOUT, EOF): + return pattern + if isinstance(pattern, self.allowed_string_types): + return self._coerce_expect_string(pattern) + self._pattern_type_err(pattern) + + try: + pattern_list = iter(pattern_list) + except TypeError: + self._pattern_type_err(pattern_list) + pattern_list = [prepare_pattern(p) for p in pattern_list] + + exp = Expecter(self, searcher_string(pattern_list), searchwindowsize) + if async_: + from ._async import expect_async + return expect_async(exp, timeout) + else: + return exp.expect_loop(timeout) + + def expect_loop(self, searcher, timeout=-1, searchwindowsize=-1): + '''This is the common loop used inside expect. The 'searcher' should be + an instance of searcher_re or searcher_string, which describes how and + what to search for in the input. + + See expect() for other arguments, return value and exceptions. ''' + + exp = Expecter(self, searcher, searchwindowsize) + return exp.expect_loop(timeout) + + def read(self, size=-1): + '''This reads at most "size" bytes from the file (less if the read hits + EOF before obtaining size bytes). If the size argument is negative or + omitted, read all data until EOF is reached. The bytes are returned as + a string object. An empty string is returned when EOF is encountered + immediately. ''' + + if size == 0: + return self.string_type() + if size < 0: + # delimiter default is EOF + self.expect(self.delimiter) + return self.before + + # I could have done this more directly by not using expect(), but + # I deliberately decided to couple read() to expect() so that + # I would catch any bugs early and ensure consistent behavior. + # It's a little less efficient, but there is less for me to + # worry about if I have to later modify read() or expect(). + # Note, it's OK if size==-1 in the regex. That just means it + # will never match anything in which case we stop only on EOF. + cre = re.compile(self._coerce_expect_string('.{%d}' % size), re.DOTALL) + # delimiter default is EOF + index = self.expect([cre, self.delimiter]) + if index == 0: + ### FIXME self.before should be ''. Should I assert this? 
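+            # The '.{size}' pattern matches from the start of the buffer, so
+            # the requested data lands in self.after and self.before should
+            # be empty at this point.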
+ return self.after + return self.before + + def readline(self, size=-1): + '''This reads and returns one entire line. The newline at the end of + line is returned as part of the string, unless the file ends without a + newline. An empty string is returned if EOF is encountered immediately. + This looks for a newline as a CR/LF pair (\\r\\n) even on UNIX because + this is what the pseudotty device returns. So contrary to what you may + expect you will receive newlines as \\r\\n. + + If the size argument is 0 then an empty string is returned. In all + other cases the size argument is ignored, which is not standard + behavior for a file-like object. ''' + + if size == 0: + return self.string_type() + # delimiter default is EOF + index = self.expect([self.crlf, self.delimiter]) + if index == 0: + return self.before + self.crlf + else: + return self.before + + def __iter__(self): + '''This is to support iterators over a file-like object. + ''' + return iter(self.readline, self.string_type()) + + def readlines(self, sizehint=-1): + '''This reads until EOF using readline() and returns a list containing + the lines thus read. The optional 'sizehint' argument is ignored. + Remember, because this reads until EOF that means the child + process should have closed its stdout. If you run this method on + a child that is still running with its stdout open then this + method will block until it timesout.''' + + lines = [] + while True: + line = self.readline() + if not line: + break + lines.append(line) + return lines + + def fileno(self): + '''Expose file descriptor for a file-like interface + ''' + return self.child_fd + + def flush(self): + '''This does nothing. It is here to support the interface for a + File-like object. ''' + pass + + def isatty(self): + """Overridden in subclass using tty""" + return False + + # For 'with spawn(...) as child:' + def __enter__(self): + return self + + def __exit__(self, etype, evalue, tb): + # We rely on subclasses to implement close(). If they don't, it's not + # clear what a context manager should do. + self.close() diff --git a/myenv/lib/python3.10/site-packages/pexpect/utils.py b/myenv/lib/python3.10/site-packages/pexpect/utils.py new file mode 100644 index 000000000..f77451960 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pexpect/utils.py @@ -0,0 +1,187 @@ +import os +import sys +import stat +import select +import time +import errno + +try: + InterruptedError +except NameError: + # Alias Python2 exception to Python3 + InterruptedError = select.error + +if sys.version_info[0] >= 3: + string_types = (str,) +else: + string_types = (unicode, str) + + +def is_executable_file(path): + """Checks that path is an executable regular file, or a symlink towards one. + + This is roughly ``os.path isfile(path) and os.access(path, os.X_OK)``. + """ + # follow symlinks, + fpath = os.path.realpath(path) + + if not os.path.isfile(fpath): + # non-files (directories, fifo, etc.) + return False + + mode = os.stat(fpath).st_mode + + if (sys.platform.startswith('sunos') + and os.getuid() == 0): + # When root on Solaris, os.X_OK is True for *all* files, irregardless + # of their executability -- instead, any permission bit of any user, + # group, or other is fine enough. 
+ # + # (This may be true for other "Unix98" OS's such as HP-UX and AIX) + return bool(mode & (stat.S_IXUSR | + stat.S_IXGRP | + stat.S_IXOTH)) + + return os.access(fpath, os.X_OK) + + +def which(filename, env=None): + '''This takes a given filename; tries to find it in the environment path; + then checks if it is executable. This returns the full path to the filename + if found and executable. Otherwise this returns None.''' + + # Special case where filename contains an explicit path. + if os.path.dirname(filename) != '' and is_executable_file(filename): + return filename + if env is None: + env = os.environ + p = env.get('PATH') + if not p: + p = os.defpath + pathlist = p.split(os.pathsep) + for path in pathlist: + ff = os.path.join(path, filename) + if is_executable_file(ff): + return ff + return None + + +def split_command_line(command_line): + + '''This splits a command line into a list of arguments. It splits arguments + on spaces, but handles embedded quotes, doublequotes, and escaped + characters. It's impossible to do this with a regular expression, so I + wrote a little state machine to parse the command line. ''' + + arg_list = [] + arg = '' + + # Constants to name the states we can be in. + state_basic = 0 + state_esc = 1 + state_singlequote = 2 + state_doublequote = 3 + # The state when consuming whitespace between commands. + state_whitespace = 4 + state = state_basic + + for c in command_line: + if state == state_basic or state == state_whitespace: + if c == '\\': + # Escape the next character + state = state_esc + elif c == r"'": + # Handle single quote + state = state_singlequote + elif c == r'"': + # Handle double quote + state = state_doublequote + elif c.isspace(): + # Add arg to arg_list if we aren't in the middle of whitespace. + if state == state_whitespace: + # Do nothing. + None + else: + arg_list.append(arg) + arg = '' + state = state_whitespace + else: + arg = arg + c + state = state_basic + elif state == state_esc: + arg = arg + c + state = state_basic + elif state == state_singlequote: + if c == r"'": + state = state_basic + else: + arg = arg + c + elif state == state_doublequote: + if c == r'"': + state = state_basic + else: + arg = arg + c + + if arg != '': + arg_list.append(arg) + return arg_list + + +def select_ignore_interrupts(iwtd, owtd, ewtd, timeout=None): + + '''This is a wrapper around select.select() that ignores signals. If + select.select raises a select.error exception and errno is an EINTR + error then it is ignored. Mainly this is used to ignore sigwinch + (terminal resize). ''' + + # if select() is interrupted by a signal (errno==EINTR) then + # we loop back and enter the select() again. + if timeout is not None: + end_time = time.time() + timeout + while True: + try: + return select.select(iwtd, owtd, ewtd, timeout) + except InterruptedError: + err = sys.exc_info()[1] + if err.args[0] == errno.EINTR: + # if we loop back we have to subtract the + # amount of time we already waited. + if timeout is not None: + timeout = end_time - time.time() + if timeout < 0: + return([], [], []) + else: + # something else caused the select.error, so + # this actually is an exception. 
+ raise + + +def poll_ignore_interrupts(fds, timeout=None): + '''Simple wrapper around poll to register file descriptors and + ignore signals.''' + + if timeout is not None: + end_time = time.time() + timeout + + poller = select.poll() + for fd in fds: + poller.register(fd, select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR) + + while True: + try: + timeout_ms = None if timeout is None else timeout * 1000 + results = poller.poll(timeout_ms) + return [afd for afd, _ in results] + except InterruptedError: + err = sys.exc_info()[1] + if err.args[0] == errno.EINTR: + # if we loop back we have to subtract the + # amount of time we already waited. + if timeout is not None: + timeout = end_time - time.time() + if timeout < 0: + return [] + else: + # something else caused the select.error, so + # this actually is an exception. + raise diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/DESCRIPTION.rst b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/DESCRIPTION.rst new file mode 100644 index 000000000..c5c2ca0ed --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/DESCRIPTION.rst @@ -0,0 +1,27 @@ +PickleShare - a small 'shelve' like datastore with concurrency support + +Like shelve, a PickleShareDB object acts like a normal dictionary. Unlike shelve, +many processes can access the database simultaneously. Changing a value in +database is immediately visible to other processes accessing the same database. + +Concurrency is possible because the values are stored in separate files. Hence +the "database" is a directory where *all* files are governed by PickleShare. + +Example usage:: + + from pickleshare import * + db = PickleShareDB('~/testpickleshare') + db.clear() + print("Should be empty:",db.items()) + db['hello'] = 15 + db['aku ankka'] = [1,2,313] + db['paths/are/ok/key'] = [1,(5,46)] + print(db.keys()) + +This module is certainly not ZODB, but can be used for low-load +(non-mission-critical) situations where tiny code size trumps the +advanced features of a "real" object database. + +Installation guide: pip install pickleshare + + diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/INSTALLER b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/METADATA b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/METADATA new file mode 100644 index 000000000..4d9cb5fb9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/METADATA @@ -0,0 +1,43 @@ +Metadata-Version: 2.0 +Name: pickleshare +Version: 0.7.5 +Summary: Tiny 'shelve'-like database with concurrency support +Home-page: https://github.com/pickleshare/pickleshare +Author: Ville Vainio +Author-email: vivainio@gmail.com +License: MIT +Keywords: database persistence pickle ipc shelve +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Dist: pathlib2; python_version in "2.6 2.7 3.2 3.3" + +PickleShare - a small 'shelve' like datastore with concurrency support + +Like shelve, a PickleShareDB object acts like a normal dictionary. 
Unlike shelve, +many processes can access the database simultaneously. Changing a value in +database is immediately visible to other processes accessing the same database. + +Concurrency is possible because the values are stored in separate files. Hence +the "database" is a directory where *all* files are governed by PickleShare. + +Example usage:: + + from pickleshare import * + db = PickleShareDB('~/testpickleshare') + db.clear() + print("Should be empty:",db.items()) + db['hello'] = 15 + db['aku ankka'] = [1,2,313] + db['paths/are/ok/key'] = [1,(5,46)] + print(db.keys()) + +This module is certainly not ZODB, but can be used for low-load +(non-mission-critical) situations where tiny code size trumps the +advanced features of a "real" object database. + +Installation guide: pip install pickleshare + + diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/RECORD b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/RECORD new file mode 100644 index 000000000..01096fc35 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/RECORD @@ -0,0 +1,10 @@ +__pycache__/pickleshare.cpython-310.pyc,, +pickleshare-0.7.5.dist-info/DESCRIPTION.rst,sha256=MzTGgSadINt6oklHN7G__Xz9_AGWxZ8wSUF52_xlqMc,955 +pickleshare-0.7.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pickleshare-0.7.5.dist-info/METADATA,sha256=gtNHj-qWJlHKtLE9qkGxPzN3GwPOw9dtHs51VQR-Ce4,1519 +pickleshare-0.7.5.dist-info/RECORD,, +pickleshare-0.7.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pickleshare-0.7.5.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pickleshare-0.7.5.dist-info/metadata.json,sha256=7sZL7JqXL9AxCsI_tY_kSTedkIcXt9fcUilY3cJbXx0,805 +pickleshare-0.7.5.dist-info/top_level.txt,sha256=GiYXIFGUaJgH-I56GU6Q9DA8QmwBtTNL1_UMBlESlts,12 +pickleshare.py,sha256=YAYV_g5pDnVfO3IeR5vMyNNIWs4dhBMtTfIHevLKI6E,9942 diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/REQUESTED b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/REQUESTED new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/WHEEL b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/WHEEL new file mode 100644 index 000000000..8b6dd1b5a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/metadata.json b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/metadata.json new file mode 100644 index 000000000..8eefd98c1 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "vivainio@gmail.com", "name": "Ville Vainio", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/pickleshare/pickleshare"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "keywords": ["database", "persistence", "pickle", "ipc", "shelve"], "license": "MIT", "metadata_version": "2.0", "name": "pickleshare", "run_requires": [{"environment": 
"python_version in \"2.6 2.7 3.2 3.3\"", "requires": ["pathlib2"]}], "summary": "Tiny 'shelve'-like database with concurrency support", "version": "0.7.5"} \ No newline at end of file diff --git a/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/top_level.txt b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/top_level.txt new file mode 100644 index 000000000..e28afd85e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare-0.7.5.dist-info/top_level.txt @@ -0,0 +1 @@ +pickleshare diff --git a/myenv/lib/python3.10/site-packages/pickleshare.py b/myenv/lib/python3.10/site-packages/pickleshare.py new file mode 100644 index 000000000..086f84f6e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pickleshare.py @@ -0,0 +1,352 @@ +#!/usr/bin/env python + +""" PickleShare - a small 'shelve' like datastore with concurrency support + +Like shelve, a PickleShareDB object acts like a normal dictionary. Unlike +shelve, many processes can access the database simultaneously. Changing a +value in database is immediately visible to other processes accessing the +same database. + +Concurrency is possible because the values are stored in separate files. Hence +the "database" is a directory where *all* files are governed by PickleShare. + +Example usage:: + + from pickleshare import * + db = PickleShareDB('~/testpickleshare') + db.clear() + print "Should be empty:",db.items() + db['hello'] = 15 + db['aku ankka'] = [1,2,313] + db['paths/are/ok/key'] = [1,(5,46)] + print db.keys() + del db['aku ankka'] + +This module is certainly not ZODB, but can be used for low-load +(non-mission-critical) situations where tiny code size trumps the +advanced features of a "real" object database. + +Installation guide: pip install pickleshare + +Author: Ville Vainio +License: MIT open source license. 
+ +""" + +from __future__ import print_function + + +__version__ = "0.7.5" + +try: + from pathlib import Path +except ImportError: + # Python 2 backport + from pathlib2 import Path + +import os,stat,time +try: + import collections.abc as collections_abc +except ImportError: + import collections as collections_abc +try: + import cPickle as pickle +except ImportError: + import pickle +import errno +import sys + +if sys.version_info[0] >= 3: + string_types = (str,) +else: + string_types = (str, unicode) + +def gethashfile(key): + return ("%02x" % abs(hash(key) % 256))[-2:] + +_sentinel = object() + +class PickleShareDB(collections_abc.MutableMapping): + """ The main 'connection' object for PickleShare database """ + def __init__(self,root): + """ Return a db object that will manage the specied directory""" + if not isinstance(root, string_types): + root = str(root) + root = os.path.abspath(os.path.expanduser(root)) + self.root = Path(root) + if not self.root.is_dir(): + # catching the exception is necessary if multiple processes are concurrently trying to create a folder + # exists_ok keyword argument of mkdir does the same but only from Python 3.5 + try: + self.root.mkdir(parents=True) + except OSError as e: + if e.errno != errno.EEXIST: + raise + # cache has { 'key' : (obj, orig_mod_time) } + self.cache = {} + + + def __getitem__(self,key): + """ db['key'] reading """ + fil = self.root / key + try: + mtime = (fil.stat()[stat.ST_MTIME]) + except OSError: + raise KeyError(key) + + if fil in self.cache and mtime == self.cache[fil][1]: + return self.cache[fil][0] + try: + # The cached item has expired, need to read + with fil.open("rb") as f: + obj = pickle.loads(f.read()) + except: + raise KeyError(key) + + self.cache[fil] = (obj,mtime) + return obj + + def __setitem__(self,key,value): + """ db['key'] = 5 """ + fil = self.root / key + parent = fil.parent + if parent and not parent.is_dir(): + parent.mkdir(parents=True) + # We specify protocol 2, so that we can mostly go between Python 2 + # and Python 3. We can upgrade to protocol 3 when Python 2 is obsolete. 
+ with fil.open('wb') as f: + pickle.dump(value, f, protocol=2) + try: + self.cache[fil] = (value, fil.stat().st_mtime) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def hset(self, hashroot, key, value): + """ hashed set """ + hroot = self.root / hashroot + if not hroot.is_dir(): + hroot.mkdir() + hfile = hroot / gethashfile(key) + d = self.get(hfile, {}) + d.update( {key : value}) + self[hfile] = d + + + + def hget(self, hashroot, key, default = _sentinel, fast_only = True): + """ hashed get """ + hroot = self.root / hashroot + hfile = hroot / gethashfile(key) + + d = self.get(hfile, _sentinel ) + #print "got dict",d,"from",hfile + if d is _sentinel: + if fast_only: + if default is _sentinel: + raise KeyError(key) + + return default + + # slow mode ok, works even after hcompress() + d = self.hdict(hashroot) + + return d.get(key, default) + + def hdict(self, hashroot): + """ Get all data contained in hashed category 'hashroot' as dict """ + hfiles = self.keys(hashroot + "/*") + hfiles.sort() + last = len(hfiles) and hfiles[-1] or '' + if last.endswith('xx'): + # print "using xx" + hfiles = [last] + hfiles[:-1] + + all = {} + + for f in hfiles: + # print "using",f + try: + all.update(self[f]) + except KeyError: + print("Corrupt",f,"deleted - hset is not threadsafe!") + del self[f] + + self.uncache(f) + + return all + + def hcompress(self, hashroot): + """ Compress category 'hashroot', so hset is fast again + + hget will fail if fast_only is True for compressed items (that were + hset before hcompress). + + """ + hfiles = self.keys(hashroot + "/*") + all = {} + for f in hfiles: + # print "using",f + all.update(self[f]) + self.uncache(f) + + self[hashroot + '/xx'] = all + for f in hfiles: + p = self.root / f + if p.name == 'xx': + continue + p.unlink() + + + + def __delitem__(self,key): + """ del db["key"] """ + fil = self.root / key + self.cache.pop(fil,None) + try: + fil.unlink() + except OSError: + # notfound and permission denied are ok - we + # lost, the other process wins the conflict + pass + + def _normalized(self, p): + """ Make a key suitable for user's eyes """ + return str(p.relative_to(self.root)).replace('\\','/') + + def keys(self, globpat = None): + """ All keys in DB, or all keys matching a glob""" + + if globpat is None: + files = self.root.rglob('*') + else: + files = self.root.glob(globpat) + return [self._normalized(p) for p in files if p.is_file()] + + def __iter__(self): + return iter(self.keys()) + + def __len__(self): + return len(self.keys()) + + def uncache(self,*items): + """ Removes all, or specified items from cache + + Use this after reading a large amount of large objects + to free up memory, when you won't be needing the objects + for a while. + + """ + if not items: + self.cache = {} + for it in items: + self.cache.pop(it,None) + + def waitget(self,key, maxwaittime = 60 ): + """ Wait (poll) for a key to get a value + + Will wait for `maxwaittime` seconds before raising a KeyError. + The call exits normally if the `key` field in db gets a value + within the timeout period. + + Use this for synchronizing different processes or for ensuring + that an unfortunately timed "db['key'] = newvalue" operation + in another process (which causes all 'get' operation to cause a + KeyError for the duration of pickling) won't screw up your program + logic. 
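+
+        A minimal usage sketch (the key name is purely illustrative)::
+
+            # Process A, eventually:
+            #     db['results/run1'] = compute()
+            # Process B blocks, polling with increasing sleep intervals,
+            # until the key appears or ~60 seconds have passed:
+            value = db.waitget('results/run1', maxwaittime=60)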
+ """ + + wtimes = [0.2] * 3 + [0.5] * 2 + [1] + tries = 0 + waited = 0 + while 1: + try: + val = self[key] + return val + except KeyError: + pass + + if waited > maxwaittime: + raise KeyError(key) + + time.sleep(wtimes[tries]) + waited+=wtimes[tries] + if tries < len(wtimes) -1: + tries+=1 + + def getlink(self,folder): + """ Get a convenient link for accessing items """ + return PickleShareLink(self, folder) + + def __repr__(self): + return "PickleShareDB('%s')" % self.root + + + +class PickleShareLink: + """ A shortdand for accessing nested PickleShare data conveniently. + + Created through PickleShareDB.getlink(), example:: + + lnk = db.getlink('myobjects/test') + lnk.foo = 2 + lnk.bar = lnk.foo + 5 + + """ + def __init__(self, db, keydir ): + self.__dict__.update(locals()) + + def __getattr__(self,key): + return self.__dict__['db'][self.__dict__['keydir']+'/' + key] + def __setattr__(self,key,val): + self.db[self.keydir+'/' + key] = val + def __repr__(self): + db = self.__dict__['db'] + keys = db.keys( self.__dict__['keydir'] +"/*") + return "" % ( + self.__dict__['keydir'], + ";".join([Path(k).basename() for k in keys])) + +def main(): + import textwrap + usage = textwrap.dedent("""\ + pickleshare - manage PickleShare databases + + Usage: + + pickleshare dump /path/to/db > dump.txt + pickleshare load /path/to/db < dump.txt + pickleshare test /path/to/db + """) + DB = PickleShareDB + import sys + if len(sys.argv) < 2: + print(usage) + return + + cmd = sys.argv[1] + args = sys.argv[2:] + if cmd == 'dump': + if not args: args= ['.'] + db = DB(args[0]) + import pprint + pprint.pprint(db.items()) + elif cmd == 'load': + cont = sys.stdin.read() + db = DB(args[0]) + data = eval(cont) + db.clear() + for k,v in db.items(): + db[k] = v + elif cmd == 'testwait': + db = DB(args[0]) + db.clear() + print(db.waitget('250')) + elif cmd == 'test': + test() + stress() + +if __name__== "__main__": + main() + + diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/INSTALLER b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/LICENSE.txt b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/LICENSE.txt new file mode 100644 index 000000000..8e7b65eaf --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-present The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/METADATA b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/METADATA new file mode 100644 index 000000000..29392cd5a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.1 +Name: pip +Version: 22.0.2 +Summary: The PyPA recommended tool for installing Python packages. +Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: distutils-sig@python.org +License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +License-File: LICENSE.txt + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + +* `Release notes`_ +* `Release process`_ + +In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right. + +**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. + +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Development documentation`_ +* `Development mailing list`_ +* `Development IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. 
_Installation: https://pip.pypa.io/en/stable/installation/
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+
diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/RECORD b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/RECORD
new file mode 100644
index 000000000..174068a1a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/RECORD
@@ -0,0 +1,1037 @@
+../../../bin/pip,sha256=MRwMn8VRspHsyo4kyRvhJR6jMNGc9s5P0eJvz2sbzdI,317
+../../../bin/pip3,sha256=MRwMn8VRspHsyo4kyRvhJR6jMNGc9s5P0eJvz2sbzdI,317
+../../../bin/pip3.10,sha256=MRwMn8VRspHsyo4kyRvhJR6jMNGc9s5P0eJvz2sbzdI,317
+pip-22.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip-22.0.2.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093
+pip-22.0.2.dist-info/METADATA,sha256=Yixa0LKkyzjT2N5JQO5qYDgZcmTs6Z6dg4UbwBNyT2A,4166
+pip-22.0.2.dist-info/RECORD,,
+pip-22.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip-22.0.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+pip-22.0.2.dist-info/entry_points.txt,sha256=vUvIlB_ga0fFQuWvFEq6uJKftMG_HNuoe4kgXkb5rNY,126
+pip-22.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip/__init__.py,sha256=PZBF-ESk5Q0DZxQd4HHmTU_wX8y1ynzxBCRdu_fxHSI,357
+pip/__main__.py,sha256=mXwWDftNLMKfwVqKFWGE_uuBZvGSIiUELhLkeysIuZc,1198
+pip/__pycache__/__init__.cpython-310.pyc,,
+pip/__pycache__/__main__.cpython-310.pyc,,
+pip/_internal/__init__.py,sha256=nnFCuxrPMgALrIDxSoy-H6Zj4W4UY60D-uL1aJyq0pc,573
+pip/_internal/__pycache__/__init__.cpython-310.pyc,,
+pip/_internal/__pycache__/build_env.cpython-310.pyc,,
+pip/_internal/__pycache__/cache.cpython-310.pyc,,
+pip/_internal/__pycache__/configuration.cpython-310.pyc,,
+pip/_internal/__pycache__/exceptions.cpython-310.pyc,,
+pip/_internal/__pycache__/main.cpython-310.pyc,,
+pip/_internal/__pycache__/pyproject.cpython-310.pyc,,
+pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc,,
+pip/_internal/__pycache__/wheel_builder.cpython-310.pyc,,
+pip/_internal/build_env.py,sha256=QAsnxJFvj74jS2cZUcxk7zXLvrtAYiRL0EkSPkpSJTo,9739
+pip/_internal/cache.py,sha256=71eaYwrls34HJ6gzbmmYiotiKhPNFTM_tqYJXD5nf3s,9441
+pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-310.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-310.pyc,, +pip/_internal/cli/__pycache__/main.cpython-310.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-310.pyc,, +pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-310.pyc,, +pip/_internal/cli/__pycache__/spinners.cpython-310.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc,, +pip/_internal/cli/autocompletion.py,sha256=wY2JPZY2Eji1vhR7bVo-yCBPJ9LCy6P80iOAhZD1Vi8,6676 +pip/_internal/cli/base_command.py,sha256=6IVFmOjObv0ILip28QcgP8glhXHiGRvU_9kO35Hr7Z0,8037 +pip/_internal/cli/cmdoptions.py,sha256=GT2G2YKBj-851qGseugn2Veq7fJe3FA30gWdcziPQvo,28525 +pip/_internal/cli/command_context.py,sha256=a1pBBvvGLDiZ1Kw64_4tT6HmRTwYDoYy8JFgG5Czn7s,760 +pip/_internal/cli/main.py,sha256=ioJ8IVlb2K1qLOxR-tXkee9lURhYV89CDM71MKag7YY,2472 +pip/_internal/cli/main_parser.py,sha256=Q9TnytfuC5Z2JSjBFWVGtEdYLFy7rukNIb04movHdAo,2614 +pip/_internal/cli/parser.py,sha256=CDXTuFr2UD8ozOlZYf1KDziQdo9-X_IaYOiUcyJQwrA,10788 +pip/_internal/cli/progress_bars.py,sha256=_52w11WoZrvDSR3oItLWvLrEZFUKAfLf4Y6I6WtOnIU,10339 +pip/_internal/cli/req_command.py,sha256=VwqonOy18QwZsRsVjHhp-6w15fG9x3Ltwoa8yJqQno8,18669 +pip/_internal/cli/spinners.py,sha256=TFhjxtOnLeNJ5YmRvQm4eKPgPbJNkZiqO8jOXuxRaYU,5076 +pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 +pip/_internal/commands/__init__.py,sha256=Vc1HjsLEtyCh7506OozPHPKXe2Hk-z9cFkFF3BMj1lM,3736 +pip/_internal/commands/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/commands/__pycache__/cache.cpython-310.pyc,, +pip/_internal/commands/__pycache__/check.cpython-310.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-310.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-310.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-310.pyc,, +pip/_internal/commands/__pycache__/download.cpython-310.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-310.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-310.pyc,, +pip/_internal/commands/__pycache__/help.cpython-310.pyc,, +pip/_internal/commands/__pycache__/index.cpython-310.pyc,, +pip/_internal/commands/__pycache__/install.cpython-310.pyc,, +pip/_internal/commands/__pycache__/list.cpython-310.pyc,, +pip/_internal/commands/__pycache__/search.cpython-310.pyc,, +pip/_internal/commands/__pycache__/show.cpython-310.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/commands/cache.py,sha256=p9gvc6W_xgxE2zO0o8NXqO1gGJEinEK42qEC-a7Cnuk,7524 +pip/_internal/commands/check.py,sha256=0gjXR7j36xJT5cs2heYU_dfOfpnFfzX8OoPNNoKhqdM,1685 +pip/_internal/commands/completion.py,sha256=kTG_I1VR3N5kGC4Ma9pQTSoY9Q1URCrNyseHSQ-rCL4,2958 +pip/_internal/commands/configuration.py,sha256=arE8vLstjBg-Ar1krXF-bBmT1qBtnL7Fpk-NVh38a0U,8944 +pip/_internal/commands/debug.py,sha256=krET-y45CnQzXwKR1qA3M_tJE4LE2vnQtm3yfGyDSnE,6629 +pip/_internal/commands/download.py,sha256=gVIAEOcpWolhRj9hl89Qzn52G2b_pcZ8naXhxaXobdo,4942 
+pip/_internal/commands/freeze.py,sha256=PaJJB9mT_3vHeZ3mbFL_m1fzTYL-_Or3kDtXwTdZZ-A,2968 +pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703 +pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132 +pip/_internal/commands/index.py,sha256=8pYkICUJlccjm3E83b7UuZ5DtOfLh1N7ZHXAgkajjHo,4849 +pip/_internal/commands/install.py,sha256=YVygBF6vfrNi0jmdNBCM6bcoWb7vaALEGG1--8Mmf88,27893 +pip/_internal/commands/list.py,sha256=aKt1PP7enTiNLD_1qDXXaIKQ2QvLmUDfoQU6SYxJ8Ek,12318 +pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697 +pip/_internal/commands/show.py,sha256=2VicM3jF0YWgn4O1jG_QF5oxOT0ln57VDu1NE6hqWcM,5859 +pip/_internal/commands/uninstall.py,sha256=DNTYAGJNljMO_YYBxrpcwj0FEl7lo_P55_98O6g2TNk,3526 +pip/_internal/commands/wheel.py,sha256=7HAjLclZxIzBrX6JmhmGBVxH5xrjaBYCtSdpQi1pWCE,6206 +pip/_internal/configuration.py,sha256=qmCX3uuVM73PQeAuWQHic22bhops8s31B8k02nFAoiQ,13171 +pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858 +pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-310.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-310.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/distributions/base.py,sha256=3FUYD8Gb4YuSu3pggC_FRctZBDbpm5ZK89tPksIUjoE,1172 +pip/_internal/distributions/installed.py,sha256=HzfNRu3smoOm54m8H2iK6LHzBx6_DEnka4OPEsizbXg,680 +pip/_internal/distributions/sdist.py,sha256=0nJvU1RhZtbwaeYtLbzSwYrbGRcY6IgNsWdEhAHROK8,5499 +pip/_internal/distributions/wheel.py,sha256=-NgzdIs-w_hcer_U81yzgpVTljJRg5m79xufqvbjv0s,1115 +pip/_internal/exceptions.py,sha256=U-dV1ixkSz6NAU6Aw9dosKi2EzZ5D3BA7ilYZuTLKeU,20912 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/index/__pycache__/collector.cpython-310.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-310.pyc,, +pip/_internal/index/__pycache__/sources.cpython-310.pyc,, +pip/_internal/index/collector.py,sha256=8kXlmlnZ-qAknyxd0duCn5mxFHX-zr468ykutk8WOwo,21392 +pip/_internal/index/package_finder.py,sha256=9UVg-7582nYNEWa0cIIl8otzPm4mlfyrQVuozAcssLo,36783 +pip/_internal/index/sources.py,sha256=SVyPitv08-Qalh2_Bk5diAJ9GAA_d-a93koouQodAG0,6557 +pip/_internal/locations/__init__.py,sha256=ergvPwlfNTmQYFmaRYbj--ZwTN5izgTL9KE5d0FB7-8,17362 +pip/_internal/locations/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc,, +pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc,, +pip/_internal/locations/__pycache__/base.cpython-310.pyc,, +pip/_internal/locations/_distutils.py,sha256=Sk7tw8ZP1DWMYJ8MibABsa8IME2Ejv1PKeGlYQCBTZc,5871 +pip/_internal/locations/_sysconfig.py,sha256=LQNKTJKyjVqxXaPntlBwdUqTG1xwYf6GVCKMbyRJx5M,7918 +pip/_internal/locations/base.py,sha256=x5D1ONktmPJd8nnUTh-ELsAJ7fiXA-k-0a_vhfi2_Us,1579 +pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340 +pip/_internal/metadata/__init__.py,sha256=iGoDbe_iTXQTIAEVy9f7dm-VQfZANO8kkwFr1CpqxqI,2036 +pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/metadata/__pycache__/base.cpython-310.pyc,, +pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc,, 
+pip/_internal/metadata/base.py,sha256=SCRPtShrtPy0lfFxuaFTgJJHsRXToGFToQUAZoBBbeA,19429 +pip/_internal/metadata/pkg_resources.py,sha256=wAnEtrcgH9YtV996MfoBjR2hGLHvi3uxk0vUOHbqBak,9456 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-310.pyc,, +pip/_internal/models/__pycache__/direct_url.cpython-310.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-310.pyc,, +pip/_internal/models/__pycache__/index.cpython-310.pyc,, +pip/_internal/models/__pycache__/link.cpython-310.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-310.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-310.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-310.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/models/candidate.py,sha256=6pcABsaR7CfIHlbJbr2_kMkVJFL_yrYjTx6SVWUnCPQ,990 +pip/_internal/models/direct_url.py,sha256=7XtGQSLLDQb5ZywI2EMnnLcddtf5CJLx44lMtTHPxFw,6350 +pip/_internal/models/format_control.py,sha256=DJpMYjxeYKKQdwNcML2_F0vtAh-qnKTYe-CpTxQe-4g,2520 +pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030 +pip/_internal/models/link.py,sha256=hoT_qsOBAgLBm9GKqpBrNF_mrEXeGXQE-aH_RX2cGgg,9817 +pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738 +pip/_internal/models/search_scope.py,sha256=LwloG0PJAmtI1hFXIypsD95kWE9xfR5hf_a2v1Vw7sk,4520 +pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907 +pip/_internal/models/target_python.py,sha256=qKpZox7J8NAaPmDs5C_aniwfPDxzvpkrCKqfwndG87k,3858 +pip/_internal/models/wheel.py,sha256=wlyz23BcZ40nBLX3rXKtrV6tmc8-8RxHyV-hq5zJ74Q,3525 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/network/__pycache__/auth.cpython-310.pyc,, +pip/_internal/network/__pycache__/cache.cpython-310.pyc,, +pip/_internal/network/__pycache__/download.cpython-310.pyc,, +pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc,, +pip/_internal/network/__pycache__/session.cpython-310.pyc,, +pip/_internal/network/__pycache__/utils.cpython-310.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc,, +pip/_internal/network/auth.py,sha256=a3C7Xaa8kTJjXkdi_wrUjqaySc8Z9Yz7U6QIbXfzMyc,12190 +pip/_internal/network/cache.py,sha256=FJ3uTUo3wgf2KHmeZ3ltN9x3tQoy_0X6qNsRtNXsuL0,2131 +pip/_internal/network/download.py,sha256=12Ef_L7MlhNUN_0-n_3DggozWJER8c9J0us16cbvkKA,6062 +pip/_internal/network/lazy_wheel.py,sha256=1b8ZJ1w4bSBzpGzGwJR_CL2yQ6AFIwWQkS1vbPPw2XU,7627 +pip/_internal/network/session.py,sha256=38IKGKC64MTVUIH5XOR1hr2pOCzp39RccykdmGAvqRU,16729 +pip/_internal/network/utils.py,sha256=igLlTu_-q0LmL8FdJKq-Uj7AT_owrQ-T9FfyarkhK5U,4059 +pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/operations/__pycache__/check.cpython-310.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-310.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-310.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc,, +pip/_internal/operations/build/metadata.py,sha256=ES_uRmAvhrNm_nDTpZxshBfUsvnXtkj-g_4rZrH9Rww,1404 +pip/_internal/operations/build/metadata_editable.py,sha256=_Rai0VZjxoeJUkjkuICrq45LtjwFoDOveosMYH43rKc,1456 +pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198 +pip/_internal/operations/build/wheel.py,sha256=AO9XnTGhTgHtZmU8Dkbfo1OGr41rBuSDjIgAa4zUKgE,1063 +pip/_internal/operations/build/wheel_editable.py,sha256=TVETY-L_M_dSEKBhTIcQOP75zKVXw8tuq1U354Mm30A,1405 +pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064 +pip/_internal/operations/check.py,sha256=ca4O9CkPt9Em9sLCf3H0iVt1GIcW7M8C0U5XooaBuT4,5109 +pip/_internal/operations/freeze.py,sha256=ZiYw5GlUpLVx4VJHz4S1AP2JFNyvH0iq5kpcYj2ovyw,9770 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=ee4kfJHNuzTdKItbfAsNOSEwq_vD7DRPGkBdK48yBhU,1354 +pip/_internal/operations/install/legacy.py,sha256=x7BG8kBm0K3JO6AR4sBl0zh2LOrfUaz7EdNt-keHBv4,4091 +pip/_internal/operations/install/wheel.py,sha256=QuQyCZE-XjuJjDYRixo40oUt2ucFhNmSrCbcXY7A9aE,27412 +pip/_internal/operations/prepare.py,sha256=LJP97jsuiCAaTGVIRrcINvxc1ntVsB45MoRbyMIukg4,24145 +pip/_internal/pyproject.py,sha256=Wm2ljdT6spC-tSdf1LBRaMYSJaXr1xUxV3OwdHCW9jc,6722 +pip/_internal/req/__init__.py,sha256=A7mUvT1KAcCYP3H7gUOTx2GRMlgoDur3H68Q0OJqM5A,2793 +pip/_internal/req/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-310.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-310.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-310.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-310.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc,, +pip/_internal/req/constructors.py,sha256=fXmtNI_J77JFP_HRvYcQW-1nKw3AiUu6Q3b1Nm8aMm0,16094 +pip/_internal/req/req_file.py,sha256=5N8OTouPCof-305StC2YK9HBxQMw-xO46skRoBPbkZo,17421 +pip/_internal/req/req_install.py,sha256=jU1HQBT_DnXZean7jY8wPNMhb6_CzdKHcilHFY_o-Fc,32524 +pip/_internal/req/req_set.py,sha256=kHYiLvkKRx21WaLTwOI-54Ng0SSzZZ9SE7FD0PsfvYA,7584 +pip/_internal/req/req_tracker.py,sha256=jK7JDu-Wt73X-gqozrFtgJVlUlnQo0P4IQ4x4_gPlfM,4117 +pip/_internal/req/req_uninstall.py,sha256=K2BHYRRJAfkSpFqcPzc9XfX2EvbhaRtQIPRFmMtUdfo,23814 +pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/resolution/__pycache__/base.cpython-310.pyc,, 
+pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583 +pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc,, +pip/_internal/resolution/legacy/resolver.py,sha256=b7bf5qL1ROg73sl8dhTvLdD1w5XF8xybBAF6eF_kz7c,18288 +pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/base.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-310.pyc,, +pip/_internal/resolution/resolvelib/base.py,sha256=u1O4fkvCO4mhmu5i32xrDv9AX5NgUci_eYVyBDQhTIM,5220 +pip/_internal/resolution/resolvelib/candidates.py,sha256=KR5jxZRSahByOABXbwrX-zNoawa7Gm9Iss-HrvrcvNw,18357 +pip/_internal/resolution/resolvelib/factory.py,sha256=0bbxnUSSjaeTmtIEgeeKtEqhEFfNhv3xpq7j9IaMq2c,28298 +pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705 +pip/_internal/resolution/resolvelib/provider.py,sha256=LzQQyzMVaZYAwLgKInbq-it6mbQL1gX0hGohz5Cr5wg,9915 +pip/_internal/resolution/resolvelib/reporter.py,sha256=3ZVVYrs5PqvLFJkGLcuXoMK5mTInFzl31xjUpDBpZZk,2526 +pip/_internal/resolution/resolvelib/requirements.py,sha256=B1ndvKPSuyyyTEXt9sKhbwminViSWnBrJa7qO2ln4Z0,5455 +pip/_internal/resolution/resolvelib/resolver.py,sha256=ucoVKHtwH6gkZjcfIVJbUiOIHLqJxeYlrKTMIJciYwM,11335 +pip/_internal/self_outdated_check.py,sha256=GKSatNlt2cz_CMGxu72FbUzuPaXpWOnIVKOOYIk0gvY,6849 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/utils/__pycache__/_log.cpython-310.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc,, +pip/_internal/utils/__pycache__/compat.cpython-310.pyc,, +pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc,, +pip/_internal/utils/__pycache__/datetime.cpython-310.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc,, +pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc,, +pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-310.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-310.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-310.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-310.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-310.pyc,, +pip/_internal/utils/__pycache__/models.cpython-310.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-310.pyc,, 
+pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-310.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-310.pyc,, +pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015 +pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665 +pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884 +pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377 +pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242 +pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627 +pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206 +pip/_internal/utils/distutils_args.py,sha256=mcAscyp80vTt3xAGTipnpgc83V-_wCvydNELVXLq7JI,1249 +pip/_internal/utils/egg_link.py,sha256=5MVlpz5LirT4iLQq86OYzjXaYF0D4Qk1dprEI7ThST4,2203 +pip/_internal/utils/encoding.py,sha256=bdZ3YgUpaOEBI5MP4-DEXiQarCW3V0rxw1kRz-TaU1Q,1169 +pip/_internal/utils/entrypoints.py,sha256=aPvCnQVi9Hdk35Kloww_D5ibjUpqxgqcJP8O9VuMZek,1055 +pip/_internal/utils/filesystem.py,sha256=rrl-rY1w8TYyKYndUyZlE9ffkQyA4-jI9x_59zXkn5s,5893 +pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716 +pip/_internal/utils/glibc.py,sha256=tDfwVYnJCOC0BNVpItpy8CGLP9BjkxFHdl0mTS0J7fc,3110 +pip/_internal/utils/hashes.py,sha256=anpZfFGIT6HcIj2td9NHtE8AWg6GeAIhwpP8GPvZE0E,4811 +pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795 +pip/_internal/utils/logging.py,sha256=Rvght-fDXL70VWib1cpgZ3iU-kXODV98bNeLUlbqVto,11522 +pip/_internal/utils/misc.py,sha256=MdUB12BMhj73sEmskEutmPyWFaJB7asoPCfLzs_YeT0,19359 +pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193 +pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108 +pip/_internal/utils/setuptools_build.py,sha256=vNH9hQB9wT6d-h1hVQhBKw91jNeT42meHpVeii-urOI,5652 +pip/_internal/utils/subprocess.py,sha256=vIWGpet5ARBmZ2Qn4NEHNgzCOduqbPIuByZmhhmr6mM,9182 +pip/_internal/utils/temp_dir.py,sha256=zob3PYMVevONkheOMUp_4jDofrEY3HIu5DHK78cSspI,7662 +pip/_internal/utils/unpacking.py,sha256=HUFlMEyCa9dPwdLh6sWeh95DeKytV8rsOyKShEw9y6g,8906 +pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759 +pip/_internal/utils/virtualenv.py,sha256=4_48qMzCwB_F5jIK5BC_ua7uiAMVifmQWU9NdaGUoVA,3459 +pip/_internal/utils/wheel.py,sha256=lXOgZyTlOm5HmK8tw5iw0A3_5A6wRzsXHOaQkIvvloU,4549 +pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596 +pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-310.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc,, +pip/_internal/vcs/bazaar.py,sha256=IGb5ca1xSZfgegRD2_JeyoZPrQQHs7lEYEIgpVsKpoU,3047 +pip/_internal/vcs/git.py,sha256=mjhwudCx9WlLNkxZ6_kOKmueF0rLoU2i1xeASKF6yiQ,18116 
+pip/_internal/vcs/mercurial.py,sha256=Bzbd518Jsx-EJI0IhIobiQqiRsUv5TWYnrmRIFWE0Gw,5238 +pip/_internal/vcs/subversion.py,sha256=TEMRdwECvMcXakZX0pTNUep79kmBYkWDkWFkrYmcmac,11718 +pip/_internal/vcs/versioncontrol.py,sha256=KUOc-hN51em9jrqxKwUR3JnkgSE-xSOqMiiJcSaL6B8,22811 +pip/_internal/wheel_builder.py,sha256=65rOA8FSYt3c3HyqEw17uujjlCgqmoKEIv6rv9xN2NM,12307 +pip/_vendor/__init__.py,sha256=xjcBX0EP50pkaMdCssrsBXoZgo2hTtYxlcH1CIyA3T4,4708 +pip/_vendor/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/__pycache__/distro.cpython-310.pyc,, +pip/_vendor/__pycache__/six.cpython-310.pyc,, +pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc,, +pip/_vendor/cachecontrol/__init__.py,sha256=1j_YQfjmiix6YyouLrftC6NzksAm8e8xGSjMKMRPIkM,465 +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc,, +pip/_vendor/cachecontrol/_cmd.py,sha256=lxUXqfNTVx84zf6tcWbkLZHA6WVBRtJRpfeA9ZqhaAY,1379 +pip/_vendor/cachecontrol/adapter.py,sha256=ew9OYEQHEOjvGl06ZsuX8W3DAvHWsQKHwWAxISyGug8,5033 +pip/_vendor/cachecontrol/cache.py,sha256=eMS9Bn9JWQkHiIYA5GPRBqKVU95uS-yXkxrzpoafRig,917 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=gGFOtIH8QDRvkP4YAfGIh-u9YYcGZVxwLM1-6e1mPNI,170 +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc,, +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=P2KHcNXiqxEW7fCq5KC-NYHGSk0nNR9NIKuN-vBTn-E,4251 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=tu_YBV7EV8vdBRGazUErkoRqYYjSBmNcB8dZ7BNomqk,940 +pip/_vendor/cachecontrol/compat.py,sha256=LNx7vqBndYdHU8YuJt53ab_8rzMGTXVrvMb7CZJkxG0,778 +pip/_vendor/cachecontrol/controller.py,sha256=9DSEiV58Gx7Ce69fLCrRcpN-_sHzXTY4ol9bEviatR0,15625 +pip/_vendor/cachecontrol/filewrapper.py,sha256=X4BAQOO26GNOR7nH_fhTzAfeuct2rBQcx_15MyFBpcs,3946 +pip/_vendor/cachecontrol/heuristics.py,sha256=8kAyuZLSCyEIgQr6vbUwfhpqg9ows4mM0IV6DWazevI,4154 +pip/_vendor/cachecontrol/serialize.py,sha256=dlySaeA5U7Q5eHvjiObgo1M8j8_huVjfWjid7Aq-r8c,6783 +pip/_vendor/cachecontrol/wrapper.py,sha256=X3-KMZ20Ho3VtqyVaXclpeQpFzokR5NE8tZSfvKVaB8,774 +pip/_vendor/certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62 +pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255 +pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/certifi/__pycache__/__main__.cpython-310.pyc,, +pip/_vendor/certifi/__pycache__/core.cpython-310.pyc,, +pip/_vendor/certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969 +pip/_vendor/certifi/core.py,sha256=CcwptmiI-3M50jIdO0HT6Fh6W_wqGsf8QcX9yfzvyuc,2791 +pip/_vendor/chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271 +pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc,, 
+pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/jisfreq.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc,, +pip/_vendor/chardet/__pycache__/version.cpython-310.pyc,, +pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 +pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +pip/_vendor/chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839 +pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +pip/_vendor/chardet/cli/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-310.pyc,, +pip/_vendor/chardet/cli/chardetect.py,sha256=XK5zqjUG2a4-y6eLHZ8ThYcp6WWUrdlmELxNypcc2SE,2747 +pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +pip/_vendor/chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200 +pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 
+pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +pip/_vendor/chardet/langbulgarianmodel.py,sha256=rk9CJpuxO0bObboJcv6gNgWuosYZmd8qEEds5y7DS_Y,105697 +pip/_vendor/chardet/langgreekmodel.py,sha256=S-uNQ1ihC75yhBvSux24gLFZv3QyctMwC6OxLJdX-bw,99571 +pip/_vendor/chardet/langhebrewmodel.py,sha256=DzPP6TPGG_-PV7tqspu_d8duueqm7uN-5eQ0aHUw1Gg,98776 +pip/_vendor/chardet/langhungarianmodel.py,sha256=RtJH7DZdsmaHqyK46Kkmnk5wQHiJwJPPJSqqIlpeZRc,102498 +pip/_vendor/chardet/langrussianmodel.py,sha256=THqJOhSxiTQcHboDNSc5yofc2koXXQFHFyjtyuntUfM,131180 +pip/_vendor/chardet/langthaimodel.py,sha256=R1wXHnUMtejpw0JnH_JO8XdYasME6wjVqp1zP7TKLgg,103312 +pip/_vendor/chardet/langturkishmodel.py,sha256=rfwanTptTwSycE4-P-QasPmzd-XVYgevytzjlEzBBu8,95946 +pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/chardet/metadata/__pycache__/languages.cpython-310.pyc,, +pip/_vendor/chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474 +pip/_vendor/chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136 +pip/_vendor/chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309 +pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +pip/_vendor/chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503 +pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +pip/_vendor/chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242 +pip/_vendor/colorama/__init__.py,sha256=pCdErryzLSzDW5P-rRPBlPLqbBtIRNJB6cMgoeJns5k,239 +pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc,, 
+pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 +pip/_vendor/colorama/ansitowin32.py,sha256=yV7CEmCb19MjnJKODZEEvMH_fnbJhwnpzo4sxZuGXmA,10517 +pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 +pip/_vendor/distlib/__init__.py,sha256=y-rKDBB99QJ3N1PJGAXQo89ou615aAeBjV2brBxKgM8,581 +pip/_vendor/distlib/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-310.pyc,, +pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc,, +pip/_vendor/distlib/compat.py,sha256=tfoMrj6tujk7G4UC2owL6ArgDuCKabgBxuJRGZSmpko,41259 +pip/_vendor/distlib/database.py,sha256=hBO2dgvDF7W3BqX8Ecns6p_RPerCaIbNKbdUOuJ1a14,51456 +pip/_vendor/distlib/index.py,sha256=UfcimNW19AB7IKWam4VaJbXuCBvArKfSxhV16EwavzE,20739 +pip/_vendor/distlib/locators.py,sha256=4D2hEcHePNuW4mXEZ3Cuw12eW-vbO-4WuAlbf4h5K7w,51963 +pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811 +pip/_vendor/distlib/markers.py,sha256=TpHHHLgkzyT7YHbwj-2i6weRaq-Ivy2-MUnrDkjau-U,5058 +pip/_vendor/distlib/metadata.py,sha256=vatoxFdmBr6ie-sTVXVNPOPG3uwMDWJTnEECnm7xDCw,39109 +pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820 +pip/_vendor/distlib/scripts.py,sha256=tjSwENINeV91ROZxec5zTSMRg2jEeKc4enyCHDzNvEE,17720 +pip/_vendor/distlib/util.py,sha256=31dPXn3Rfat0xZLeVoFpuniyhe6vsbl9_QN-qd9Lhlk,66262 +pip/_vendor/distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513 +pip/_vendor/distlib/wheel.py,sha256=pj5VVCjqZMcHvgizORWwAFPS7hOk61CZ59dxP8laQ4E,42943 +pip/_vendor/distro.py,sha256=O1EeHMq1-xAO373JI2_6pYEtd09yEkxtmrYkdY-9S-w,48414 +pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160 +pip/_vendor/html5lib/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/_inputstream.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/_utils.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-310.pyc,, +pip/_vendor/html5lib/__pycache__/serializer.cpython-310.pyc,, +pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728 +pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353 +pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040 +pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109 +pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-310.pyc,, 
+pip/_vendor/html5lib/_trie/__pycache__/py.cpython-310.pyc,, +pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013 +pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931 +pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/base.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-310.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-310.pyc,, +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919 +pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945 +pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897 +pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214 +pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186 +pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679 +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-310.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-310.pyc,, +pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776 +pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592 +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-310.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-310.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-310.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-310.pyc,, +pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719 +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-310.pyc,, 
+pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-310.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-310.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-310.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-310.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-310.pyc,, +pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551 +pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357 +pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309 +pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +pip/_vendor/idna/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/codec.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/core.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/intranges.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/package_data.cpython-310.pyc,, +pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc,, +pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 +pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +pip/_vendor/idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 +pip/_vendor/idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 +pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +pip/_vendor/idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 +pip/_vendor/idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 +pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118 +pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc,, +pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc,, +pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc,, +pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc,, +pip/_vendor/msgpack/_version.py,sha256=JpTcnRd3YUioA24NDtDZbLW0Nhl2yA-N1Rq2lLDBB-g,20 +pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081 +pip/_vendor/msgpack/ext.py,sha256=4l356Y4sVEcvCla2dh_cL57vh4GMhZfa3kuWHFHYz6A,6088 +pip/_vendor/msgpack/fallback.py,sha256=L5jriXysURbf6rPbbHbvXgvoFrKZiryIBmujMTcrf3A,34475 +pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661 +pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497 +pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc,, 
+pip/_vendor/packaging/__pycache__/version.cpython-310.pyc,, +pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488 +pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378 +pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487 +pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676 +pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110 +pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699 +pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200 +pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665 +pip/_vendor/pep517/__init__.py,sha256=Y1bATL2qbFNN6M_DQa4yyrwqjpIiL-j9T6kBmR0DS14,130 +pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/build.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/check.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc,, +pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc,, +pip/_vendor/pep517/build.py,sha256=2bar6EdjwIz2Dlfy94qdxn3oA9mVnnny40mfoT5f-qI,3457 +pip/_vendor/pep517/check.py,sha256=bCORq1WrHjhpTONa-zpAqG0EB9rHNuhO1ORu6DsDuL8,6084 +pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098 +pip/_vendor/pep517/compat.py,sha256=NmLImE5oiDT3gbEhJ4w7xeoMFcpAPrGu_NltBytSJUY,1253 +pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129 +pip/_vendor/pep517/envbuild.py,sha256=zFde--rmzjXMLXcm7SA_3hDtgk5VCTA8hjpk88RbF6E,6100 +pip/_vendor/pep517/in_process/__init__.py,sha256=MyWoAi8JHdcBv7yXuWpUSVADbx6LSB9rZh7kTIgdA8Y,563 +pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc,, +pip/_vendor/pep517/in_process/_in_process.py,sha256=D3waguyNSGcwosociD5USfcycYr2RCzCjYtxX5UHQmQ,11201 +pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463 +pip/_vendor/pep517/wrappers.py,sha256=impq7Cz_LL1iDF1iiOzYWB4MaEu6O6Gps7TJ5qsJz1Q,13429 +pip/_vendor/pkg_resources/__init__.py,sha256=NnpQ3g6BCHzpMgOR_OLBmYtniY4oOzdKpwqghfq_6ug,108287 +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc,, +pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562 +pip/_vendor/platformdirs/__init__.py,sha256=Aizpxewwd4nY63Gqw-Od1Rso9Ah4bSoc6rkx-GBRu2Y,12676 +pip/_vendor/platformdirs/__main__.py,sha256=ZmsnTxEOxtTvwa-Y_Vfab_JN3X4XCVeN8X0yyy9-qnc,1176 +pip/_vendor/platformdirs/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/__main__.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/android.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/api.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/macos.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/unix.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc,, +pip/_vendor/platformdirs/__pycache__/windows.cpython-310.pyc,, 
+pip/_vendor/platformdirs/android.py,sha256=xhlD4NmrKCARe5lgnpBGYo4lOYxEOBOByNDNYy91gEE,4012 +pip/_vendor/platformdirs/api.py,sha256=MXKHXOL3eh_-trSok-JUTjAR_zjmmKF3rjREVABjP8s,4910 +pip/_vendor/platformdirs/macos.py,sha256=-3UXQewbT0yMhMdkzRXfXGAntmLIH7Qt4a9Hlf8I5_Y,2655 +pip/_vendor/platformdirs/unix.py,sha256=b4aVYTz0qZ50HntwOXo8r6tp82jAa3qTjxw-WlnC2yc,6910 +pip/_vendor/platformdirs/version.py,sha256=bXzLJCe23FNQRQrf7ZRWKejxWnct_wft7dxdkMGT33E,80 +pip/_vendor/platformdirs/windows.py,sha256=ISruopR5UGBePC0BxCxXevkZYfjJsIZc49YWU5iYfQ4,6439 +pip/_vendor/progress/__init__.py,sha256=1HejNZtv2ouUNQeStUDAtZrtwkz_3FmYKQ476hJ7zOs,5294 +pip/_vendor/progress/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-310.pyc,, +pip/_vendor/progress/__pycache__/colors.cpython-310.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-310.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-310.pyc,, +pip/_vendor/progress/bar.py,sha256=GbedY0oZ-Q1duXjmvVLO0tSf-uTSH7hJ3zzyI91Esws,2942 +pip/_vendor/progress/colors.py,sha256=cCYXQnYFYVmQKKmYEbQ_lj6SPSFzdw4FN98F2x2kR-U,2655 +pip/_vendor/progress/counter.py,sha256=zYt9DWH0_05s8Q9TrJwHVud-WwsyyaR3PwYtk5hxwwQ,1613 +pip/_vendor/progress/spinner.py,sha256=u5ElzW94XEiLGH-aAlr54VJtKfeK745xr6UfGvvflzU,1461 +pip/_vendor/pygments/__init__.py,sha256=CAmA9UthykwxvtutUcH0IxqtiyQcSg6CmYdM-jKlcRY,3002 +pip/_vendor/pygments/__main__.py,sha256=X7rGLMUC54EXgO14FZ9goKXZDmhPzKXTsUglmb_McIU,353 +pip/_vendor/pygments/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/__main__.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/cmdline.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/console.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/filter.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/formatter.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/lexer.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/modeline.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/plugin.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/regexopt.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/scanner.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/sphinxext.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/style.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/token.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/unistring.cpython-310.pyc,, +pip/_vendor/pygments/__pycache__/util.cpython-310.pyc,, +pip/_vendor/pygments/cmdline.py,sha256=XpsyWgErcSqHC7rXiYKLF3Y61Uy8SR2DNQDDhZGuezg,23408 +pip/_vendor/pygments/console.py,sha256=QZXBUAkyl4dPLQ1e6XHjQu3mmXBWvuGQwsQT2q1mtCY,1697 +pip/_vendor/pygments/filter.py,sha256=35iMZiB1rcuogxokm92kViB2DPXPp_wWoxWuMmwvvzY,1938 +pip/_vendor/pygments/filters/__init__.py,sha256=-veOimzCyYGEARru2Dfo6ofSYcZ8tGsIVuMprtaZQ24,40292 +pip/_vendor/pygments/filters/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pygments/formatter.py,sha256=zSBbX2U_OOriy7SJvSTK6OAxjuXtROWxQlNpJEJZjBA,2917 +pip/_vendor/pygments/formatters/__init__.py,sha256=fjkYDy5-F998XczKi0ymHFayr5ObIRLHF8cgp9k8kpA,5119 +pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/groff.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/html.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/img.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/irc.cpython-310.pyc,, 
+pip/_vendor/pygments/formatters/__pycache__/latex.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/other.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/svg.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-310.pyc,, +pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-310.pyc,, +pip/_vendor/pygments/formatters/_mapping.py,sha256=3A1rYSjYN9MLduCFWy2_mYhllPVpwlw55anRYnPXX8w,6516 +pip/_vendor/pygments/formatters/bbcode.py,sha256=cSKMOioUnE4TzvCCsK4IbJ6G78W07ZwHtkz4V1Wte0U,3314 +pip/_vendor/pygments/formatters/groff.py,sha256=ULgMKvGeLswX0KZn3IBp0p0U3rruiSHBtpl6O5qbqLs,5005 +pip/_vendor/pygments/formatters/html.py,sha256=0jM7Jc4xA4tsjmPq35uklm_En_OVdcNb0__SEXp2pDQ,35330 +pip/_vendor/pygments/formatters/img.py,sha256=r4iag_jCfyv_LhIt-1fRDeVEEoAfVJzkD9nZChIwiS8,21819 +pip/_vendor/pygments/formatters/irc.py,sha256=gi_IeIZeNaTfTMtvseLigZdS6lNicN7r7O7rnI6myo0,5871 +pip/_vendor/pygments/formatters/latex.py,sha256=qZUerrHt2Nn2aB4gJcdqj99qBkIxl_1v1ukYsf230Gk,18930 +pip/_vendor/pygments/formatters/other.py,sha256=Q01LtkqPZ8m_EYdgMVzXPUGjHoL00lXI3By97wzytYU,5073 +pip/_vendor/pygments/formatters/pangomarkup.py,sha256=ZpjALTSuGFwviJd5kOYwr-1NgqxCX3XRJrjXC7x1UbQ,2212 +pip/_vendor/pygments/formatters/rtf.py,sha256=qh7-z_wbUsTY6z7fZUGrYECYBlWB0wEdBwIZVEVybL0,5014 +pip/_vendor/pygments/formatters/svg.py,sha256=T7Jj004I3JUPOr48aAhQ368K2qWCciUyMQ2tdU-LB-4,7335 +pip/_vendor/pygments/formatters/terminal.py,sha256=cRD5hitINOkYlGZo9ma252vpJYPSGNgLivrsm6zGyec,4674 +pip/_vendor/pygments/formatters/terminal256.py,sha256=Bvz9zZL3UWc94TDm1GhKMI4x0BTit0XplhyRL0zmtkw,11753 +pip/_vendor/pygments/lexer.py,sha256=ECXWlEsbRnKs_njozZns6BGQ4riTMzct_BzAr3zV6dY,31937 +pip/_vendor/pygments/lexers/__init__.py,sha256=6Ds0GVBP3jrIU02wmjRdpoL4eFGhwT2IVD1zf3cV5_Y,11307 +pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-310.pyc,, +pip/_vendor/pygments/lexers/__pycache__/python.cpython-310.pyc,, +pip/_vendor/pygments/lexers/_mapping.py,sha256=jAxmvh5wvNkD-p3Fh6E7hY_B0sGbcxWRfseT6iq7ex4,70032 +pip/_vendor/pygments/lexers/python.py,sha256=LXnk43Lcngqn9xj6eRqdk2f73oF4kHZWiwgHMM_RlVM,52776 +pip/_vendor/pygments/modeline.py,sha256=37fen3cf1moCz4vMVJqX41eAQCmj8pzUchikgPcHp-U,986 +pip/_vendor/pygments/plugin.py,sha256=zGSig3S7QX-3o6RDxd4_Uvice_t25l_BN9aQQ9k8vmU,1727 +pip/_vendor/pygments/regexopt.py,sha256=mj8Fgu3sT0d5PZwRwDLexEvVOQbuHeosubQnqVwgiqs,3072 +pip/_vendor/pygments/scanner.py,sha256=nGoHy-Npk2ylUd4bws_CJN1hK785Xqo8e0teRmNX2jo,3091 +pip/_vendor/pygments/sphinxext.py,sha256=FZ2puvLe2Bztqtj6UJvQd7D8TvtOZ1GsfRJObvH59tE,4630 +pip/_vendor/pygments/style.py,sha256=lGyan5bU42q1kGMfFqafwL3g1j5EurTvfkv8vdP7NzQ,6257 +pip/_vendor/pygments/styles/__init__.py,sha256=Qx2zq6ufbDNE2cTp51M-s9zW-sDE-KLIqFw31qr3Bhg,3252 +pip/_vendor/pygments/styles/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pygments/token.py,sha256=lNPgeaQTzu2DEUi6n_lxAIU7uy4DVj8LMI3nSVnTjks,6143 +pip/_vendor/pygments/unistring.py,sha256=Xs0FzOzE0l0iWRoTlcgi-Q_kAMdF5Gt5FL_goGKJc98,63188 +pip/_vendor/pygments/util.py,sha256=s9n8BQXIxG3lIwCPWv5-ci8yhaqq5JbEVK9v8Z-8_3I,9123 +pip/_vendor/pyparsing/__init__.py,sha256=jXheGTFT1b6r_4WxuOE0uVUqiouLJ3WHzOScpLieRgQ,9107 +pip/_vendor/pyparsing/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/actions.cpython-310.pyc,, 
+pip/_vendor/pyparsing/__pycache__/common.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/core.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/exceptions.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/helpers.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/results.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/testing.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/unicode.cpython-310.pyc,, +pip/_vendor/pyparsing/__pycache__/util.cpython-310.pyc,, +pip/_vendor/pyparsing/actions.py,sha256=60v7mETOBzc01YPH_qQD5isavgcSJpAfIKpzgjM3vaU,6429 +pip/_vendor/pyparsing/common.py,sha256=lFL97ooIeR75CmW5hjURZqwDCTgruqltcTCZ-ulLO2Q,12936 +pip/_vendor/pyparsing/core.py,sha256=GtQsD06HlwKPc7M8K8hyOuOW-cRnd87AxAHq-ad5lEk,212248 +pip/_vendor/pyparsing/diagram/__init__.py,sha256=h0gsUwmo5N3shgvfXVQTtqvTpUAv-ZdQjSQ6IUJmsxY,22165 +pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/pyparsing/exceptions.py,sha256=H4D9gqMavqmAFSsdrU_J6bO-jA-T-A7yvtXWZpooIUA,9030 +pip/_vendor/pyparsing/helpers.py,sha256=kqpIZFG-y0fQ3g_TmloYllo9we6YCYiewZMXIK0y5wc,38299 +pip/_vendor/pyparsing/results.py,sha256=4D-oURF1cLeL7k0d3zMqUuWH_gTjop_OrZwik9O0HXU,25339 +pip/_vendor/pyparsing/testing.py,sha256=szs8AKZREZMhL0y0vsMfaTVAnpqPHetg6VKJBNmc4QY,13388 +pip/_vendor/pyparsing/unicode.py,sha256=IR-ioeGY29cZ49tG8Ts7ITPWWNP5G2DcZs58oa8zn44,10381 +pip/_vendor/pyparsing/util.py,sha256=kq772O5YSeXOSdP-M31EWpbH_ayj7BMHImBYo9xPD5M,6805 +pip/_vendor/requests/__init__.py,sha256=6IUFQM6K9V2NIu4fe4LtUsN21-TFbw_w3EfPpdUN-qc,5130 +pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/help.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/packages.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-310.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-310.pyc,, +pip/_vendor/requests/__version__.py,sha256=q8miOQaomOv3S74lK4eQs1zZ5jwcnOusyEU-M2idhts,441 +pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096 +pip/_vendor/requests/adapters.py,sha256=WazYJQ_b2LHhNDb_y0hscNlWVsSe5ca5I3pymPrer5w,21861 +pip/_vendor/requests/api.py,sha256=hjuoP79IAEmX6Dysrw8t032cLfwLHxbI_wM4gC5G9t0,6402 +pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207 +pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465 +pip/_vendor/requests/compat.py,sha256=N1281mkcTluMjKqCSLf88LR6HNOygEhS1TbR9LLsoVY,2114 +pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430 +pip/_vendor/requests/exceptions.py,sha256=VcpBXOL-9JYhNbK8OZxCIImBgpQSXJlUelDPf1f-pmM,3446 +pip/_vendor/requests/help.py,sha256=dyhe3lcmHXnFCzDiZVjcGmVvvO_jtsfAm-AC542ndw8,3972 
+pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757 +pip/_vendor/requests/models.py,sha256=7pzscX_47qxx7-zEaBWGxMoB33Vdf6HLoUKZh1ktEvM,35116 +pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695 +pip/_vendor/requests/sessions.py,sha256=Zu-Y9YPlwTIsyFx1hvIrc3ziyeFpuFPqcOuSuz8BNWs,29835 +pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188 +pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005 +pip/_vendor/requests/utils.py,sha256=siud-FQ6xgKFbL49DRvAb3PMQMMHoeCL_TCmuHh9AUU,33301 +pip/_vendor/resolvelib/__init__.py,sha256=UL-B2BDI0_TRIqkfGwLHKLxY-LjBlomz7941wDqzB1I,537 +pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc,, +pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc,, +pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc,, +pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc,, +pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc,, +pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156 +pip/_vendor/resolvelib/providers.py,sha256=roVmFBItQJ0TkhNua65h8LdNny7rmeqVEXZu90QiP4o,5872 +pip/_vendor/resolvelib/reporters.py,sha256=fW91NKf-lK8XN7i6Yd_rczL5QeOT3sc6AKhpaTEnP3E,1583 +pip/_vendor/resolvelib/resolvers.py,sha256=2wYzVGBGerbmcIpH8cFmgSKgLSETz8jmwBMGjCBMHG4,17592 +pip/_vendor/resolvelib/structs.py,sha256=IVIYof6sA_N4ZEiE1C1UhzTX495brCNnyCdgq6CYq28,4794 +pip/_vendor/rich/__init__.py,sha256=wF1th4JGBCVC02xfaw8j6P2MrFcJaQJL72scKtEmDYQ,5804 +pip/_vendor/rich/__main__.py,sha256=vd1PP-o7_1un-ThdgMU9LHV-D8z56yz_-fryczn38eE,8810 +pip/_vendor/rich/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/__main__.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_cell_widths.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_emoji_replace.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_extension.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_inspect.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_log_render.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_loop.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_lru_cache.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_palettes.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_pick.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_ratio.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_spinners.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_stack.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_timer.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_windows.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/_wrap.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/abc.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/align.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/ansi.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/bar.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/box.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/cells.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/color.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/color_triplet.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/columns.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/console.cpython-310.pyc,, 
+pip/_vendor/rich/__pycache__/constrain.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/containers.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/control.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/default_styles.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/diagnose.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/emoji.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/errors.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/file_proxy.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/filesize.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/highlighter.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/json.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/jupyter.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/layout.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/live.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/live_render.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/logging.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/markup.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/measure.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/padding.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/pager.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/palette.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/panel.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/pretty.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/progress.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/progress_bar.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/prompt.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/protocol.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/region.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/repr.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/rule.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/scope.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/screen.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/segment.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/spinner.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/status.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/style.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/styled.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/syntax.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/table.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/tabulate.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/terminal_theme.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/text.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/theme.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/themes.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/traceback.cpython-310.pyc,, +pip/_vendor/rich/__pycache__/tree.cpython-310.pyc,, +pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096 +pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235 +pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064 +pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265 +pip/_vendor/rich/_inspect.py,sha256=vq6BjewwEvddjcBTr_lCcjYQBsKi92aTNpcXyaA5ERA,7444 +pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225 +pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236 +pip/_vendor/rich/_lru_cache.py,sha256=M7H1ZQF32o6SxrpOur9zTIhEHlNXT9XnrcdhruUmG5I,1246 +pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063 +pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423 +pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472 
+pip/_vendor/rich/_spinners.py,sha256=huT1biTlwyp9Lm8S7bLfVzg1psUaIH5xHDwTaWEHVh0,26521 +pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351 +pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417 +pip/_vendor/rich/_windows.py,sha256=nBO71icHMIHlzT7hg6fkoIdh1mT-5MvDdPDwunkshyw,2065 +pip/_vendor/rich/_wrap.py,sha256=OtnSxnERkuNlSM1d_MYtNg8KIYTcTBk3peg16dCZH_U,1804 +pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890 +pip/_vendor/rich/align.py,sha256=2zRHV8SzR5eP-vQkSDgjmgsBLBluCBwykgejAW6oRD0,10425 +pip/_vendor/rich/ansi.py,sha256=QaVVkfvVL6C3OsuWI9iQ-iJFkMsMohjYlxgMLnVTEPo,6676 +pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264 +pip/_vendor/rich/box.py,sha256=o0ywz1iW0WjGLPrRVDAZPh1CVPEgAOaWsn8Bf3sf43g,9069 +pip/_vendor/rich/cells.py,sha256=NadN20gFxE8Aj-2S3Drn7qgn-ZpsRZcNnTNtweRL7rA,4285 +pip/_vendor/rich/color.py,sha256=SD3yTf3t8japb-jOv8GYCMCDqyzpipzXS_0rAXhSlU4,17285 +pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054 +pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131 +pip/_vendor/rich/console.py,sha256=bioCy8012eZ8PIOBxMyyqxYPltKk2pGEG9jmwylNCQk,81236 +pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288 +pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497 +pip/_vendor/rich/control.py,sha256=qxg6Yjd78XuF0VxthlT8O4dpvpACYwKkBfm2S4-IvHA,5298 +pip/_vendor/rich/default_styles.py,sha256=At42PcWzmnYWcx5fUOKyOUpI8HK5m4ItZqxkgHToaMs,7614 +pip/_vendor/rich/diagnose.py,sha256=4L8SZfbqjIRotzJ39QzD9-d4I80FyV1mNKHryg1eArE,183 +pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501 +pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642 +pip/_vendor/rich/file_proxy.py,sha256=fHeReSO3VJ7IbH_9ri-OrPYbFC3UYOzeTNjngiiWOcY,1613 +pip/_vendor/rich/filesize.py,sha256=oQJnM5_7ygkpzt3GtNq5l3F6gmB7YahBA5vpdQVKLwI,2511 +pip/_vendor/rich/highlighter.py,sha256=AdhjC0meTYswZ_xKgka0cRYdNjLABLUzHAbyF3QpPWo,4894 +pip/_vendor/rich/json.py,sha256=RCm4lXBXrjvXHpqrWPH8wdGP0jEo4IohLmkddlhRY18,5051 +pip/_vendor/rich/jupyter.py,sha256=4sxNAwJs4g3dYfWy_enPw9fp0Tdn-82tV4T9uh9vAOM,3025 +pip/_vendor/rich/layout.py,sha256=b64KMDP2EPiC103P-v-_VZKGY13oWiiGS418P_KRRlc,14048 +pip/_vendor/rich/live.py,sha256=OKxMaFU5sFfuR--cJftGYjSvg1VPQri1U_DNZUjCsvI,13711 +pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667 +pip/_vendor/rich/logging.py,sha256=YNcCSK6pCo2Wg6JKqScAe6VgFqebHBnS5nDnBO4gXAA,10868 +pip/_vendor/rich/markup.py,sha256=hsVW_k1TIvj5OPPQ12ihAii9HSVa8N1TStvA5B2GGpo,8058 +pip/_vendor/rich/measure.py,sha256=Z74XvzIgLZm0xH-QIo1uX5d4oahavHe8D8MKyxLNqPQ,5258 +pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970 +pip/_vendor/rich/pager.py,sha256=VK_2EfH0JduZWdyV-KZma06bvi_V5PWmHG6W7BoiaTg,838 +pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396 +pip/_vendor/rich/panel.py,sha256=O6ORyIhDcOLSEasTjpcDvmhvIcppPGCeQoXpoycIUT8,8637 +pip/_vendor/rich/pretty.py,sha256=HAB68BpYysaL1EXeV4X5Tt-U2hDlcLpbFz06fkojWWE,32572 +pip/_vendor/rich/progress.py,sha256=jcgi7aMnQ_YjSpAmQkalwtNsgVn9i56SeZGprr7tuOk,35926 +pip/_vendor/rich/progress_bar.py,sha256=ELiBaxJOgsRYKpNIrot7BC0bFXvmf8cTd6nxI02BbK0,7762 +pip/_vendor/rich/prompt.py,sha256=gKVd13YWv6jedzwcRPZGUINBjC-xcJhJ_xz_NvMW80c,11307 
+pip/_vendor/rich/protocol.py,sha256=Vx6n4fEoSDhzSup8t3KH0iK2RWyssIOks5E0S1qw1GA,1401 +pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166 +pip/_vendor/rich/repr.py,sha256=1A0U0_ibG_bZbw71pUBIctO9Az-CQUuyOTbiKcJOwyw,4309 +pip/_vendor/rich/rule.py,sha256=cPK6NYo4kzh-vM_8a-rXajXplsbaHa6ahErYvGSsrJ0,4197 +pip/_vendor/rich/scope.py,sha256=HX13XsJfqzQHpPfw4Jn9JmJjCsRj9uhHxXQEqjkwyLA,2842 +pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591 +pip/_vendor/rich/segment.py,sha256=MBBAWaHyqCQFCfiNbrTW4BGaFR1uU31XktJ1S3Taqb4,23916 +pip/_vendor/rich/spinner.py,sha256=V6dW0jIk5IO0_2MyxyftQf5VjCHI0T2cRhJ4F31hPIQ,4312 +pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425 +pip/_vendor/rich/style.py,sha256=AD1I7atfclsFCtGeL8ronH1Jj-02WLp9ZQ2VYqmpBjM,26469 +pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258 +pip/_vendor/rich/syntax.py,sha256=pJAD08ywowg5xVwTGCqUOMpDYskjoMoDYEV-hryEX5s,26994 +pip/_vendor/rich/table.py,sha256=oQAEBaV4zMUPyg_tSA93_GrCirdIf-osolxf9wb3pEo,36757 +pip/_vendor/rich/tabulate.py,sha256=nl0oeNbiXectEgTHyj3K7eN4NZMISpaogpOdZyEOGbs,1700 +pip/_vendor/rich/terminal_theme.py,sha256=E0nI_ycFpvflamt-KVCY4J52LmUjRi1Y6ICB-Ef3gMo,1459 +pip/_vendor/rich/text.py,sha256=auX3LpY-I6PBiNyxB3o3LyMEx7lna2cx9IbNQJDwtw8,44424 +pip/_vendor/rich/theme.py,sha256=GKNtQhDBZKAzDaY0vQVQQFzbc0uWfFe6CJXA-syT7zQ,3627 +pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102 +pip/_vendor/rich/traceback.py,sha256=hAU3IR295eFuup_px2NU4aCEWu7KQs1qpZbnqoHCtR0,25935 +pip/_vendor/rich/tree.py,sha256=JxyWbc27ZuwoLQnd7I-rSsRsqI9lzaVKlfTLJXla9U0,9122 +pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549 +pip/_vendor/tenacity/__init__.py,sha256=GLLsTFD4Bd5VDgTR6mU_FxyOsrxc48qONorVaRebeD4,18257 +pip/_vendor/tenacity/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/_asyncio.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/_utils.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/after.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/before.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/before_sleep.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/nap.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/retry.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/stop.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-310.pyc,, +pip/_vendor/tenacity/__pycache__/wait.cpython-310.pyc,, +pip/_vendor/tenacity/_asyncio.py,sha256=HEb0BVJEeBJE9P-m9XBxh1KcaF96BwoeqkJCL5sbVcQ,3314 +pip/_vendor/tenacity/_utils.py,sha256=-y68scDcyoqvTJuJJ0GTfjdSCljEYlbCYvgk7nM4NdM,1944 +pip/_vendor/tenacity/after.py,sha256=dlmyxxFy2uqpLXDr838DiEd7jgv2AGthsWHGYcGYsaI,1496 +pip/_vendor/tenacity/before.py,sha256=7XtvRmO0dRWUp8SVn24OvIiGFj8-4OP5muQRUiWgLh0,1376 +pip/_vendor/tenacity/before_sleep.py,sha256=ThyDvqKU5yle_IvYQz_b6Tp6UjUS0PhVp6zgqYl9U6Y,1908 +pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383 +pip/_vendor/tenacity/retry.py,sha256=62R71W59bQjuNyFKsDM7hE2aEkEPtwNBRA0tnsEvgSk,6645 +pip/_vendor/tenacity/stop.py,sha256=sKHmHaoSaW6sKu3dTxUVKr1-stVkY7lw4Y9yjZU30zQ,2790 +pip/_vendor/tenacity/tornadoweb.py,sha256=E8lWO2nwe6dJgoB-N2HhQprYLDLB_UdSgFnv-EN6wKE,2145 +pip/_vendor/tenacity/wait.py,sha256=e_Saa6I2tsNLpCL1t9897wN2fGb0XQMQlE4bU2t9V2w,6691 +pip/_vendor/tomli/__init__.py,sha256=z1Elt0nLAqU5Y0DOn9p__8QnLWavlEOpRyQikdYgKro,230 +pip/_vendor/tomli/__pycache__/__init__.cpython-310.pyc,, 
+pip/_vendor/tomli/__pycache__/_parser.cpython-310.pyc,, +pip/_vendor/tomli/__pycache__/_re.cpython-310.pyc,, +pip/_vendor/tomli/_parser.py,sha256=50BD4o9YbzFAGAYyZLqZC8F81DQ7iWWyJnrHNwBKa6A,22415 +pip/_vendor/tomli/_re.py,sha256=5GPfgXKteg7wRFCF-DzlkAPI2ilHbkMK2-JC49F-AJQ,2681 +pip/_vendor/typing_extensions.py,sha256=1uqi_RSlI7gos4eJB_NEV3d5wQwzTUQHd3_jrkbTo8Q,87149 +pip/_vendor/urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763 +pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc,, +pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc,, +pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811 +pip/_vendor/urllib3/_version.py,sha256=_NdMUQaeBvFHAX2z3zAIX2Wum58A6rVtY1f7ByHsQ4g,63 +pip/_vendor/urllib3/connection.py,sha256=6zokyboYYKm9VkyrQvVVLgxMyCZK7n9Vmg_2ZK6pbhc,20076 +pip/_vendor/urllib3/connectionpool.py,sha256=qz-ICrW6g4TZVCbDQ8fRe68BMpXkskkR9vAVY9zUWtA,39013 +pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957 +pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc,, +pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632 +pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922 +pip/_vendor/urllib3/contrib/appengine.py,sha256=lfzpHFmJiO82shClLEm3QB62SYgHWnjpZOH_2JhU5Tc,11034 +pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538 +pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=DD4pInv_3OEEGffEFynBoirc8ldR789sLmGSKukzA0E,16900 +pip/_vendor/urllib3/contrib/securetransport.py,sha256=4qUKo7PUV-vVIqXmr2BD-sH7qplB918jiD5eNsRI9vU,34449 +pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097 +pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217 +pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579 +pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440 
+pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/packages/__pycache__/six.cpython-310.pyc,, +pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-310.pyc,, +pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417 +pip/_vendor/urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666 +pip/_vendor/urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763 +pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985 +pip/_vendor/urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203 +pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155 +pip/_vendor/urllib3/util/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/connection.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/proxy.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/queue.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/request.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/response.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/retry.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/timeout.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/url.cpython-310.pyc,, +pip/_vendor/urllib3/util/__pycache__/wait.cpython-310.pyc,, +pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901 +pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605 +pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498 +pip/_vendor/urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123 +pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510 +pip/_vendor/urllib3/util/retry.py,sha256=iESg2PvViNdXBRY4MpL4h0kqwOOkHkxmLn1kkhFHPU8,22001 +pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177 +pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=w01jCYuwvQ038p9mhc1P1gF8IiTN1qHakThpoukOlbw,5751 +pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895 +pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003 +pip/_vendor/urllib3/util/url.py,sha256=QVEzcbHipbXyCWwH6R4K4TR-N8T4LM55WEMwNUTBmLE,14047 +pip/_vendor/urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404 +pip/_vendor/vendor.txt,sha256=H-9fScoah7nx4K8O4Uft0l5iH2P_mVo4RqyuMVOTJEc,496 +pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579 +pip/_vendor/webencodings/__pycache__/__init__.cpython-310.pyc,, +pip/_vendor/webencodings/__pycache__/labels.cpython-310.pyc,, +pip/_vendor/webencodings/__pycache__/mklabels.cpython-310.pyc,, +pip/_vendor/webencodings/__pycache__/tests.cpython-310.pyc,, +pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-310.pyc,, 
+pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
+pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
+pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
+pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
+pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/REQUESTED b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/REQUESTED
new file mode 100644
index 000000000..e69de29bb
diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/WHEEL b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/WHEEL
new file mode 100644
index 000000000..becc9a66e
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/entry_points.txt b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/entry_points.txt
new file mode 100644
index 000000000..c4ad52128
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+pip = pip._internal.cli.main:main
+pip3 = pip._internal.cli.main:main
+pip3.10 = pip._internal.cli.main:main
+
diff --git a/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/top_level.txt b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/top_level.txt
new file mode 100644
index 000000000..a1b589e38
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip-22.0.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/myenv/lib/python3.10/site-packages/pip/__init__.py b/myenv/lib/python3.10/site-packages/pip/__init__.py
new file mode 100644
index 000000000..8a50472b2
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/__init__.py
@@ -0,0 +1,13 @@
+from typing import List, Optional
+
+__version__ = "22.0.2"
+
+
+def main(args: Optional[List[str]] = None) -> int:
+    """This is an internal API only meant for use by pip's own console scripts.
+
+    For additional details, see https://github.com/pypa/pip/issues/7498.
+    """
+    from pip._internal.utils.entrypoints import _wrapper
+
+    return _wrapper(args)
diff --git a/myenv/lib/python3.10/site-packages/pip/__main__.py b/myenv/lib/python3.10/site-packages/pip/__main__.py
new file mode 100644
index 000000000..fe34a7b77
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/__main__.py
@@ -0,0 +1,31 @@
+import os
+import sys
+import warnings
+
+# Remove '' and current working directory from the first entry
+# of sys.path, if present, to avoid using the current directory
+# in the pip commands check, freeze, install, list and show,
+# when invoked as python -m pip
+if sys.path[0] in ("", os.getcwd()):
+    sys.path.pop(0)
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows the usage python pip-*.whl/pip install pip-*.whl
+if __package__ == "":
+    # __file__ is pip-*.whl/pip/__main__.py
+    # first dirname call strips off '/__main__.py', second strips off '/pip'
+    # Resulting path is the name of the wheel itself
+    # Add that to sys.path so we can import pip
+    path = os.path.dirname(os.path.dirname(__file__))
+    sys.path.insert(0, path)
+
+if __name__ == "__main__":
+    # Work around the error reported in #9540, pending a proper fix.
+ # Note: It is essential the warning filter is set *before* importing + # pip, as the deprecation happens at import time, not runtime. + warnings.filterwarnings( + "ignore", category=DeprecationWarning, module=".*packaging\\.version" + ) + from pip._internal.cli.main import main as _main + + sys.exit(_main()) diff --git a/myenv/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..588ebf47f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 000000000..4926e3e5e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/__init__.py new file mode 100644 index 000000000..6afb5c627 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/__init__.py @@ -0,0 +1,19 @@ +from typing import List, Optional + +import pip._internal.utils.inject_securetransport # noqa +from pip._internal.utils import _log + +# init_logging() must be called before any call to logging.getLogger() +# which happens at import of most modules. +_log.init_logging() + + +def main(args: (Optional[List[str]]) = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..74cfe324f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc new file mode 100644 index 000000000..15da1d737 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc new file mode 100644 index 000000000..6db8725e4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc new file mode 100644 index 000000000..b96e3b54a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 000000000..7357bf8d8 Binary files /dev/null and 
b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc new file mode 100644 index 000000000..e8973494c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc new file mode 100644 index 000000000..b34c6bd9d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc new file mode 100644 index 000000000..199391be9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc new file mode 100644 index 000000000..bc045de93 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/build_env.py b/myenv/lib/python3.10/site-packages/pip/_internal/build_env.py new file mode 100644 index 000000000..daeb7fbc8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/build_env.py @@ -0,0 +1,296 @@ +"""Build Environment used for isolation during sdist building +""" + +import contextlib +import logging +import os +import pathlib +import sys +import textwrap +import zipfile +from collections import OrderedDict +from sysconfig import get_paths +from types import TracebackType +from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type + +from pip._vendor.certifi import where +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.version import Version + +from pip import __file__ as pip_location +from pip._internal.cli.spinners import open_spinner +from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib +from pip._internal.metadata import get_environment +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds + +if TYPE_CHECKING: + from pip._internal.index.package_finder import PackageFinder + +logger = logging.getLogger(__name__) + + +class _Prefix: + def __init__(self, path: str) -> None: + self.path = path + self.setup = False + self.bin_dir = get_paths( + "nt" if os.name == "nt" else "posix_prefix", + vars={"base": path, "platbase": path}, + )["scripts"] + self.lib_dirs = get_prefixed_libs(path) + + +@contextlib.contextmanager +def _create_standalone_pip() -> Iterator[str]: + """Create a "standalone pip" zip file. + + The zip file's content is identical to the currently-running pip. + It will be used to install requirements into the build environment. + """ + source = pathlib.Path(pip_location).resolve().parent + + # Return the current instance if `source` is not a directory. 
We can't build
+    # a zip from this, and it likely means the instance is already standalone.
+    if not source.is_dir():
+        yield str(source)
+        return
+
+    with TempDirectory(kind="standalone-pip") as tmp_dir:
+        pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
+        kwargs = {}
+        if sys.version_info >= (3, 8):
+            kwargs["strict_timestamps"] = False
+        with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
+            for child in source.rglob("*"):
+                zf.write(child, child.relative_to(source.parent).as_posix())
+        yield os.path.join(pip_zip, "pip")
+
+
+class BuildEnvironment:
+    """Creates and manages an isolated environment to install build deps"""
+
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
+
+        self._prefixes = OrderedDict(
+            (name, _Prefix(os.path.join(temp_dir.path, name)))
+            for name in ("normal", "overlay")
+        )
+
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
+        for prefix in reversed(list(self._prefixes.values())):
+            self._bin_dirs.append(prefix.bin_dir)
+            self._lib_dirs.extend(prefix.lib_dirs)
+
+        # Customize site to:
+        # - ensure .pth files are honored
+        # - prevent access to system site packages
+        system_sites = {
+            os.path.normcase(site) for site in (get_purelib(), get_platlib())
+        }
+        self._site_dir = os.path.join(temp_dir.path, "site")
+        if not os.path.exists(self._site_dir):
+            os.mkdir(self._site_dir)
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
+                    import os, site, sys
+
+                    # First, drop system-sites related paths.
+                    original_sys_path = sys.path[:]
+                    known_paths = set()
+                    for path in {system_sites!r}:
+                        site.addsitedir(path, known_paths=known_paths)
+                    system_paths = set(
+                        os.path.normcase(path)
+                        for path in sys.path[len(original_sys_path):]
+                    )
+                    original_sys_path = [
+                        path for path in original_sys_path
+                        if os.path.normcase(path) not in system_paths
+                    ]
+                    sys.path = original_sys_path
+
+                    # Second, add lib directories,
+                    # ensuring .pth files are processed.
+ for path in {lib_dirs!r}: + assert not path in sys.path + site.addsitedir(path) + """ + ).format(system_sites=system_sites, lib_dirs=self._lib_dirs) + ) + + def __enter__(self) -> None: + self._save_env = { + name: os.environ.get(name, None) + for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH") + } + + path = self._bin_dirs[:] + old_path = self._save_env["PATH"] + if old_path: + path.extend(old_path.split(os.pathsep)) + + pythonpath = [self._site_dir] + + os.environ.update( + { + "PATH": os.pathsep.join(path), + "PYTHONNOUSERSITE": "1", + "PYTHONPATH": os.pathsep.join(pythonpath), + } + ) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + for varname, old_value in self._save_env.items(): + if old_value is None: + os.environ.pop(varname, None) + else: + os.environ[varname] = old_value + + def check_requirements( + self, reqs: Iterable[str] + ) -> Tuple[Set[Tuple[str, str]], Set[str]]: + """Return 2 sets: + - conflicting requirements: set of (installed, wanted) reqs tuples + - missing requirements: set of reqs + """ + missing = set() + conflicting = set() + if reqs: + env = get_environment(self._lib_dirs) + for req_str in reqs: + req = Requirement(req_str) + dist = env.get_distribution(req.name) + if not dist: + missing.add(req_str) + continue + if isinstance(dist.version, Version): + installed_req_str = f"{req.name}=={dist.version}" + else: + installed_req_str = f"{req.name}==={dist.version}" + if dist.version not in req.specifier: + conflicting.add((installed_req_str, req_str)) + # FIXME: Consider direct URL? + return conflicting, missing + + def install_requirements( + self, + finder: "PackageFinder", + requirements: Iterable[str], + prefix_as_string: str, + *, + kind: str, + ) -> None: + prefix = self._prefixes[prefix_as_string] + assert not prefix.setup + prefix.setup = True + if not requirements: + return + with contextlib.ExitStack() as ctx: + pip_runnable = ctx.enter_context(_create_standalone_pip()) + self._install_requirements( + pip_runnable, + finder, + requirements, + prefix, + kind=kind, + ) + + @staticmethod + def _install_requirements( + pip_runnable: str, + finder: "PackageFinder", + requirements: Iterable[str], + prefix: _Prefix, + *, + kind: str, + ) -> None: + args: List[str] = [ + sys.executable, + pip_runnable, + "install", + "--ignore-installed", + "--no-user", + "--prefix", + prefix.path, + "--no-warn-script-location", + ] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append("-v") + for format_control in ("no_binary", "only_binary"): + formats = getattr(finder.format_control, format_control) + args.extend( + ( + "--" + format_control.replace("_", "-"), + ",".join(sorted(formats or {":none:"})), + ) + ) + + index_urls = finder.index_urls + if index_urls: + args.extend(["-i", index_urls[0]]) + for extra_index in index_urls[1:]: + args.extend(["--extra-index-url", extra_index]) + else: + args.append("--no-index") + for link in finder.find_links: + args.extend(["--find-links", link]) + + for host in finder.trusted_hosts: + args.extend(["--trusted-host", host]) + if finder.allow_all_prereleases: + args.append("--pre") + if finder.prefer_binary: + args.append("--prefer-binary") + args.append("--") + args.extend(requirements) + extra_environ = {"_PIP_STANDALONE_CERT": where()} + with open_spinner(f"Installing {kind}") as spinner: + call_subprocess( + args, + command_desc=f"pip subprocess to install {kind}", + spinner=spinner, + extra_environ=extra_environ, 
+            )
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+    """A no-op drop-in replacement for BuildEnvironment"""
+
+    def __init__(self) -> None:
+        pass
+
+    def __enter__(self) -> None:
+        pass
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        pass
+
+    def cleanup(self) -> None:
+        pass
+
+    def install_requirements(
+        self,
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
+        raise NotImplementedError()
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cache.py b/myenv/lib/python3.10/site-packages/pip/_internal/cache.py
new file mode 100644
index 000000000..1d6df2201
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/cache.py
@@ -0,0 +1,264 @@
+"""Cache Management
+"""
+
+import hashlib
+import json
+import logging
+import os
+from typing import Any, Dict, List, Optional, Set
+
+from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.urls import path_to_url
+
+logger = logging.getLogger(__name__)
+
+
+def _hash_dict(d: Dict[str, str]) -> str:
+    """Return a stable sha224 of a dictionary."""
+    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+    return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache:
+    """An abstract class - provides cache directories for data from links
+
+
+    :param cache_dir: The root of the cache.
+    :param format_control: An object of FormatControl class to limit
+        binaries being read from the cache.
+    :param allowed_formats: which formats of files the cache should store.
+        ('binary' and 'source' are the only allowed values)
+    """
+
+    def __init__(
+        self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
+    ) -> None:
+        super().__init__()
+        assert not cache_dir or os.path.isabs(cache_dir)
+        self.cache_dir = cache_dir or None
+        self.format_control = format_control
+        self.allowed_formats = allowed_formats
+
+        _valid_formats = {"source", "binary"}
+        assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of path that must be os.path.joined with cache_dir"""
+
+        # We want to generate a url to use as our cache key, we don't want to
+        # just re-use the URL because it might have other items in the fragment
+        # and we don't care about those.
+        key_parts = {"url": link.url_without_fragment}
+        if link.hash_name is not None and link.hash is not None:
+            key_parts[link.hash_name] = link.hash
+        if link.subdirectory_fragment:
+            key_parts["subdirectory"] = link.subdirectory_fragment
+
+        # Include interpreter name, major and minor version in cache key
+        # to cope with ill-behaved sdists that build a different wheel
+        # depending on the python version their setup.py is being run on,
+        # and don't encode the difference in compatibility tags.
+ # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: + can_not_cache = not self.cache_dir or not canonical_package_name or not link + if can_not_cache: + return [] + + formats = self.format_control.get_allowed_formats(canonical_package_name) + if not self.allowed_formats.intersection(formats): + return [] + + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + return candidates + + def get_path_for_link(self, link: Link) -> str: + """Return a directory to store cached items in for link.""" + raise NotImplementedError() + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + """Returns a link to a cached item if it exists, otherwise returns the + passed link. + """ + raise NotImplementedError() + + +class SimpleWheelCache(Cache): + """A cache of wheels for future installs.""" + + def __init__(self, cache_dir: str, format_control: FormatControl) -> None: + super().__init__(cache_dir, format_control, {"binary"}) + + def get_path_for_link(self, link: Link) -> str: + """Return a directory to store cached wheels for link + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were + not unique. E.g. ./package might have dozens of installs done for it + and build a version of 0.0...and if we built and cached a wheel, we'd + end up using the same wheel even if the source has been edited. + + :param link: The link of the sdist for which this will cache wheels. 
+ """ + parts = self._get_cache_path_parts(link) + assert self.cache_dir + # Store wheels within the root cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + candidates = [] + + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name): + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel %s for %s as it " + "does not match the expected distribution name %s.", + wheel_name, + link, + package_name, + ) + continue + if not wheel.supported(supported_tags): + # Built for a different python/arch/etc + continue + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) + + if not candidates: + return link + + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) + + +class EphemWheelCache(SimpleWheelCache): + """A SimpleWheelCache that creates it's own temporary cache directory""" + + def __init__(self, format_control: FormatControl) -> None: + self._temp_dir = TempDirectory( + kind=tempdir_kinds.EPHEM_WHEEL_CACHE, + globally_managed=True, + ) + + super().__init__(self._temp_dir.path, format_control) + + +class CacheEntry: + def __init__( + self, + link: Link, + persistent: bool, + ): + self.link = link + self.persistent = persistent + + +class WheelCache(Cache): + """Wraps EphemWheelCache and SimpleWheelCache into a single Cache + + This Cache allows for gracefully degradation, using the ephem wheel cache + when a certain link is not found in the simple wheel cache first. + """ + + def __init__(self, cache_dir: str, format_control: FormatControl) -> None: + super().__init__(cache_dir, format_control, {"binary"}) + self._wheel_cache = SimpleWheelCache(cache_dir, format_control) + self._ephem_cache = EphemWheelCache(format_control) + + def get_path_for_link(self, link: Link) -> str: + return self._wheel_cache.get_path_for_link(link) + + def get_ephem_path_for_link(self, link: Link) -> str: + return self._ephem_cache.get_path_for_link(link) + + def get( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Link: + cache_entry = self.get_cache_entry(link, package_name, supported_tags) + if cache_entry is None: + return link + return cache_entry.link + + def get_cache_entry( + self, + link: Link, + package_name: Optional[str], + supported_tags: List[Tag], + ) -> Optional[CacheEntry]: + """Returns a CacheEntry with a link to a cached item if it exists or + None. The cache entry indicates if the item was found in the persistent + or ephemeral cache. 
+ """ + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=True) + + retval = self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return CacheEntry(retval, persistent=False) + + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__init__.py new file mode 100644 index 000000000..e589bb917 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__init__.py @@ -0,0 +1,4 @@ +"""Subpackage containing all of pip's command line interface related code +""" + +# This file intentionally does not import submodules diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..73c8b858f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc new file mode 100644 index 000000000..11adc667d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc new file mode 100644 index 000000000..96a4317bf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc new file mode 100644 index 000000000..cdf8982ea Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc new file mode 100644 index 000000000..d898cd9ba Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc new file mode 100644 index 000000000..2d35c2255 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc new file mode 100644 index 000000000..0fbdc7ddc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc new file mode 100644 index 000000000..5a0a6564e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc new file mode 100644 index 000000000..2fde82e48 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc new file mode 100644 index 000000000..98f865b65 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc new file mode 100644 index 000000000..456018228 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc new file mode 100644 index 000000000..402e05e08 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py new file mode 100644 index 000000000..226fe84dc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py @@ -0,0 +1,171 @@ +"""Logic that powers autocompletion installed by ``pip completion``. +""" + +import optparse +import os +import sys +from itertools import chain +from typing import Any, Iterable, List, Optional + +from pip._internal.cli.main_parser import create_main_parser +from pip._internal.commands import commands_dict, create_command +from pip._internal.metadata import get_default_environment + + +def autocomplete() -> None: + """Entry Point for completion of main and subcommand options.""" + # Don't complete if user hasn't sourced bash_completion file. 
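For context on the function that follows: completion is driven entirely by environment variables set by the shell script that `pip completion` emits. A sketch of that protocol (the values here are illustrative, not from the diff):

import os
import subprocess

env = dict(os.environ)
env["PIP_AUTO_COMPLETE"] = "1"   # without this gate, autocomplete() returns immediately
env["COMP_WORDS"] = "pip ins"    # the command line typed so far
env["COMP_CWORD"] = "1"          # index of the word being completed
# pip prints matching completions ("install" here) and exits before
# normal command processing begins.
subprocess.run(["pip"], env=env)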
+ if "PIP_AUTO_COMPLETE" not in os.environ: + return + cwords = os.environ["COMP_WORDS"].split()[1:] + cword = int(os.environ["COMP_CWORD"]) + try: + current = cwords[cword - 1] + except IndexError: + current = "" + + parser = create_main_parser() + subcommands = list(commands_dict) + options = [] + + # subcommand + subcommand_name: Optional[str] = None + for word in cwords: + if word in subcommands: + subcommand_name = word + break + # subcommand options + if subcommand_name is not None: + # special case: 'help' subcommand has no options + if subcommand_name == "help": + sys.exit(1) + # special case: list locally installed dists for show and uninstall + should_list_installed = not current.startswith("-") and subcommand_name in [ + "show", + "uninstall", + ] + if should_list_installed: + env = get_default_environment() + lc = current.lower() + installed = [ + dist.canonical_name + for dist in env.iter_installed_distributions(local_only=True) + if dist.canonical_name.startswith(lc) + and dist.canonical_name not in cwords[1:] + ] + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + should_list_installables = ( + not current.startswith("-") and subcommand_name == "install" + ) + if should_list_installables: + for path in auto_complete_paths(current, "path"): + print(path) + sys.exit(1) + + subcommand = create_command(subcommand_name) + + for opt in subcommand.parser.option_list_all: + if opt.help != optparse.SUPPRESS_HELP: + for opt_str in opt._long_opts + opt._short_opts: + options.append((opt_str, opt.nargs)) + + # filter out previously specified options from available options + prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, + cword, + subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ````, ```` or ```` + if completion_type: + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1] and option[0][:2] == "--": + opt_label += "=" + print(opt_label) + else: + # show main parser options only when necessary + + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + flattened_opts = chain.from_iterable(opts) + if current.startswith("-"): + for opt in flattened_opts: + if opt.help != optparse.SUPPRESS_HELP: + subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, flattened_opts) + if completion_type: + subcommands = list(auto_complete_paths(current, completion_type)) + + print(" ".join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def get_path_completion_type( + cwords: List[str], cword: int, opts: Iterable[Any] +) -> Optional[str]: + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, 
``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith("-"): + return None + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split("/"): + if cwords[cword - 2].split("=")[0] == o: + if not opt.metavar or any( + x in ("path", "file", "dir") for x in opt.metavar.split("/") + ): + return opt.metavar + return None + + +def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]: + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``. + + :param current: The word to be completed + :param completion_type: path completion type (``file``, ``path`` or ``dir``) + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = ( + x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename) + ) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not ``<dir>`` after option + # complete directories when there is ``<file>``, ``<path>`` or + # ``<dir>`` after option + if completion_type != "dir" and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, "") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/base_command.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/base_command.py new file mode 100644 index 000000000..f5dc0fecf --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/base_command.py @@ -0,0 +1,220 @@ +"""Base Command class, and related routines""" + +import functools +import logging +import logging.config +import optparse +import os +import sys +import traceback +from optparse import Values +from typing import Any, Callable, List, Optional, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.cli.status_codes import ( + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.exceptions import ( + BadCommand, + CommandError, + DiagnosticPipError, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + UninstallationError, +) +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry +from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = ["Command"] + +logger = logging.getLogger(__name__) + + +class Command(CommandContextMixIn): + usage: str = "" + ignore_require_venv: bool = False + + def __init__(self, name: str, summary: str, isolated: bool = False) -> None: + super().__init__() + + self.name = name + self.summary = summary + self.parser = ConfigOptionParser( + usage=self.usage, + prog=f"{get_prog()} {name}", +
formatter=UpdatingDefaultsHelpFormatter(), + add_help_option=False, + name=name, + description=self.__doc__, + isolated=isolated, + ) + + self.tempdir_registry: Optional[TempDirRegistry] = None + + # Commands should add options to this option group + optgroup_name = f"{self.name.capitalize()} Options" + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + self.add_options() + + def add_options(self) -> None: + pass + + def handle_pip_version_check(self, options: Values) -> None: + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, "no_index") + + def run(self, options: Values, args: List[str]) -> int: + raise NotImplementedError + + def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]: + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args: List[str]) -> int: + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args: List[str]) -> int: + # We must initialize this before the tempdir manager, otherwise the + # configuration would not be accessible by the time we clean up the + # tempdir manager. + self.tempdir_registry = self.enter_context(tempdir_registry()) + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ["PIP_NO_INPUT"] = "1" + + if options.exists_action: + os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical("Could not find an activated virtualenv (required).") + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. If executing pip with sudo, you should " + "use sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + if "2020-resolver" in options.features_enabled: + logger.warning( + "--use-feature=2020-resolver no longer has any effect, " + "since it is now the default dependency resolver in pip. " + "This will become an error in pip 21.0." 
+ ) + + def intercepts_unhandled_exc( + run_func: Callable[..., int] + ) -> Callable[..., int]: + @functools.wraps(run_func) + def exc_logging_wrapper(*args: Any) -> int: + try: + status = run_func(*args) + assert isinstance(status, int) + return status + except DiagnosticPipError as exc: + logger.error("[present-diagnostic] %s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except ( + InstallationError, + UninstallationError, + BadCommand, + NetworkConnectionError, + ) as exc: + logger.critical(str(exc)) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical("%s", exc) + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to + # stderr because stdout no longer works. + print("ERROR: Pipe to stdout was broken", file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical("Operation cancelled by user") + logger.debug("Exception information:", exc_info=True) + + return ERROR + except BaseException: + logger.critical("Exception:", exc_info=True) + + return UNKNOWN_ERROR + + return exc_logging_wrapper + + try: + if not options.debug_mode: + run = intercepts_unhandled_exc(self.run) + else: + run = self.run + return run(options, args) + finally: + self.handle_pip_version_check(options) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 000000000..b7e54f7c6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,1018 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import os +import textwrap +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values +from textwrap import dedent +from typing import Any, Callable, Dict, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.parser import ConfigOptionParser +from pip._internal.cli.progress_bars import BAR_TYPES +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.misc import strtobool + +logger = logging.getLogger(__name__) + + +def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. 
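The decorator above maps each failure mode to a process exit status; a condensed standalone sketch of the same pattern (the status codes and the `run` function below are illustrative):

import functools

ERROR, UNKNOWN_ERROR = 1, 2

def intercepts_unhandled_exc(run_func):
    @functools.wraps(run_func)
    def exc_logging_wrapper(*args):
        try:
            return run_func(*args)
        except KeyboardInterrupt:
            print("Operation cancelled by user")
            return ERROR
        except BaseException:
            # anything unexpected becomes a generic failure status
            return UNKNOWN_ERROR
    return exc_logging_wrapper

@intercepts_unhandled_exc
def run():
    raise RuntimeError("boom")

print(run())  # 2 -- an exit status instead of a traceback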
+ """ + msg = f"{option} error: {msg}" + msg = textwrap.fill(" ".join(msg.split())) + parser.error(msg) + + +def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group["name"]) + for option in group["options"]: + option_group.add_option(option()) + return option_group + + +def check_install_build_global( + options: Values, check_options: Optional[Values] = None +) -> None: + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n: str) -> Optional[Any]: + return getattr(check_options, n, None) + + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + control.disallow_binaries() + logger.warning( + "Disabling all use of wheels due to the use of --build-option " + "/ --global-option / --install-option.", + ) + + +def check_dist_restriction(options: Values, check_target: bool = False) -> None: + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any( + [ + options.python_version, + options.platforms, + options.abis, + options.implementation, + ] + ) + + binary_only = FormatControl(set(), {":all:"}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + if check_target: + if dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target'" + ) + + +def _path_option_check(option: Option, opt: str, value: str) -> str: + return os.path.expanduser(value) + + +def _package_name_option_check(option: Option, opt: str, value: str) -> str: + return canonicalize_name(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path", "package_name") + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["package_name"] = _package_name_option_check + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_: Callable[..., Option] = partial( + Option, + "-h", + "--help", + dest="help", + action="help", + help="Show help.", +) + +debug_mode: Callable[..., Option] = partial( + Option, + "--debug", + dest="debug_mode", + action="store_true", + default=False, + help=( + "Let unhandled exceptions propagate outside the main subroutine, " + "instead of logging them to stderr." 
+ ), +) + +isolated_mode: Callable[..., Option] = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv: Callable[..., Option] = partial( + Option, + "--require-virtualenv", + "--require-venv", + dest="require_venv", + action="store_true", + default=False, + help=( + "Allow pip to only run in a virtual environment; " + "exit with an error otherwise." + ), +) + +verbose: Callable[..., Option] = partial( + Option, + "-v", + "--verbose", + dest="verbose", + action="count", + default=0, + help="Give more output. Option is additive, and can be used up to 3 times.", +) + +no_color: Callable[..., Option] = partial( + Option, + "--no-color", + dest="no_color", + action="store_true", + default=False, + help="Suppress colored output.", +) + +version: Callable[..., Option] = partial( + Option, + "-V", + "--version", + dest="version", + action="store_true", + help="Show version and exit.", +) + +quiet: Callable[..., Option] = partial( + Option, + "-q", + "--quiet", + dest="quiet", + action="count", + default=0, + help=( + "Give less output. Option is additive, and can be used up to 3" + " times (corresponding to WARNING, ERROR, and CRITICAL logging" + " levels)." + ), +) + +progress_bar: Callable[..., Option] = partial( + Option, + "--progress-bar", + dest="progress_bar", + type="choice", + choices=list(BAR_TYPES.keys()), + default="on", + help=( + "Specify type of progress to be displayed [" + + "|".join(BAR_TYPES.keys()) + + "] (default: %default)" + ), +) + +log: Callable[..., Option] = partial( + PipOption, + "--log", + "--log-file", + "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log.", +) + +no_input: Callable[..., Option] = partial( + Option, + # Don't ask for input + "--no-input", + dest="no_input", + action="store_true", + default=False, + help="Disable prompting for input.", +) + +proxy: Callable[..., Option] = partial( + Option, + "--proxy", + dest="proxy", + type="str", + default="", + help="Specify a proxy in the form [user:passwd@]proxy.server:port.", +) + +retries: Callable[..., Option] = partial( + Option, + "--retries", + dest="retries", + type="int", + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) + +timeout: Callable[..., Option] = partial( + Option, + "--timeout", + "--default-timeout", + metavar="sec", + dest="timeout", + type="float", + default=15, + help="Set the socket timeout (default %default seconds).", +) + + +def exists_action() -> Option: + return Option( + # Option when path already exist + "--exists-action", + dest="exists_action", + type="choice", + choices=["s", "i", "w", "b", "a"], + default=[], + action="append", + metavar="action", + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert: Callable[..., Option] = partial( + PipOption, + "--cert", + dest="cert", + type="path", + metavar="path", + help=( + "Path to PEM-encoded CA certificate bundle. " + "If provided, overrides the default. " + "See 'SSL Certificate Verification' in pip documentation " + "for more information." 
+ ), +) + +client_cert: Callable[..., Option] = partial( + PipOption, + "--client-cert", + dest="client_cert", + type="path", + default=None, + metavar="path", + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) + +index_url: Callable[..., Option] = partial( + Option, + "-i", + "--index-url", + "--pypi-url", + dest="index_url", + metavar="URL", + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) + + +def extra_index_url() -> Option: + return Option( + "--extra-index-url", + dest="extra_index_urls", + metavar="URL", + action="append", + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index: Callable[..., Option] = partial( + Option, + "--no-index", + dest="no_index", + action="store_true", + default=False, + help="Ignore package index (only looking at --find-links URLs instead).", +) + + +def find_links() -> Option: + return Option( + "-f", + "--find-links", + dest="find_links", + action="append", + default=[], + metavar="url", + help="If a URL or path to an html file, then parse for links to " + "archives such as sdist (.tar.gz) or wheel (.whl) files. " + "If a local path or file:// URL that's a directory, " + "then look for archives in the directory listing. " + "Links to VCS project URLs are not supported.", + ) + + +def trusted_host() -> Option: + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", + ) + + +def constraints() -> Option: + return Option( + "-c", + "--constraint", + dest="constraints", + action="append", + default=[], + metavar="file", + help="Constrain versions using the given constraints file. " + "This option can be used multiple times.", + ) + + +def requirements() -> Option: + return Option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help="Install from the given requirements file. " + "This option can be used multiple times.", + ) + + +def editable() -> Option: + return Option( + "-e", + "--editable", + dest="editables", + action="append", + default=[], + metavar="path/url", + help=( + "Install a project in editable mode (i.e. setuptools " + '"develop mode") from a local project path or a VCS url.' + ), + ) + + +def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src: Callable[..., Option] = partial( + PipOption, + "--src", + "--source", + "--source-dir", + "--source-directory", + dest="src_dir", + type="path", + metavar="dir", + default=get_src_prefix(), + action="callback", + callback=_handle_src, + help="Directory to check out editable projects into. "
'The default in a virtualenv is "<venv path>/src". ' 'The default for global installs is "<current dir>/src".', +) + + +def _get_format_control(values: Values, option: Option) -> Any: + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.no_binary, + existing.only_binary, + ) + + +def _handle_only_binary( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, + existing.only_binary, + existing.no_binary, + ) + + +def no_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", + dest="format_control", + action="callback", + callback=_handle_no_binary, + type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all binary packages, ":none:" to empty the set (notice ' + "the colons), or one or more package names with commas between " + "them (no colons). Note that some packages are tricky to compile " + "and may fail to install when this option is used on them.", + ) + + +def only_binary() -> Option: + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", + dest="format_control", + action="callback", + callback=_handle_only_binary, + type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + 'each time adds to the existing value. Accepts either ":all:" to ' + 'disable all source packages, ":none:" to empty the set, or one ' + "or more package names with commas between them. Packages " + "without binary distributions will fail to install when this " + "option is used on them.", + ) + + +platforms: Callable[..., Option] = partial( + Option, + "--platform", + dest="platforms", + metavar="platform", + action="append", + default=None, + help=( + "Only use wheels compatible with <platform>. Defaults to the " + "platform of the running system. Use this option multiple times to " + "specify multiple platforms supported by the target interpreter." + ), +) + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]: + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. + """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split(".") + if len(parts) > 3: + return ((), "at most three version parts are allowed") + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), "each version part must be an integer") + + return (version_info, None) + + +def _handle_python_version( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """ + Handle a provided --python-version value.
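`_convert_python_version` above has a small, easily-stated contract; these doctest-style asserts (assuming the function as defined in this file is in scope) pin it down:

assert _convert_python_version("3") == ((3,), None)
assert _convert_python_version("37") == ((3, 7), None)
assert _convert_python_version("3.7.3") == ((3, 7, 3), None)
assert _convert_python_version("") == (None, None)  # same as not providing a value
assert _convert_python_version("3.7.3.1") == ((), "at most three version parts are allowed")
assert _convert_python_version("3.x") == ((), "each version part must be an integer")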
+ """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = "invalid --python-version value: {!r}: {}".format( + value, + error_msg, + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version: Callable[..., Option] = partial( + Option, + "--python-version", + dest="python_version", + metavar="python_version", + action="callback", + callback=_handle_python_version, + type="str", + default=None, + help=dedent( + """\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """ + ), +) + + +implementation: Callable[..., Option] = partial( + Option, + "--implementation", + dest="implementation", + metavar="implementation", + default=None, + help=( + "Only use wheels compatible with Python " + "implementation , e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels." + ), +) + + +abis: Callable[..., Option] = partial( + Option, + "--abi", + dest="abis", + metavar="abi", + action="append", + default=None, + help=( + "Only use wheels compatible with Python abi , e.g. 'pypy_41'. " + "If not specified, then the current interpreter abi tag is used. " + "Use this option multiple times to specify multiple abis supported " + "by the target interpreter. Generally you will need to specify " + "--implementation, --platform, and --python-version when using this " + "option." + ), +) + + +def add_target_python_options(cmd_opts: OptionGroup) -> None: + cmd_opts.add_option(platforms()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abis()) + + +def make_target_python(options: Values) -> TargetPython: + target_python = TargetPython( + platforms=options.platforms, + py_version_info=options.python_version, + abis=options.abis, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary() -> Option: + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages.", + ) + + +cache_dir: Callable[..., Option] = partial( + PipOption, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + type="path", + help="Store the cache data in .", +) + + +def _handle_no_cache_dir( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-cache-dir option. + + This is an optparse.Option callback for the --no-cache-dir option. + """ + # The value argument will be None if --no-cache-dir is passed via the + # command-line, since the option doesn't accept arguments. However, + # the value can be non-None if the option is triggered e.g. by an + # environment variable, like PIP_NO_CACHE_DIR=true. + if value is not None: + # Then parse the string value to get argument error-checking. 
+ try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) + + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() + # converted to 0 (like "false" or "no") caused cache_dir to be disabled + # rather than enabled (logic would say the latter). Thus, we disable + # the cache directory not just on values that parse to True, but (for + # backwards compatibility reasons) also on values that parse to False. + # In other words, always set it to False if the option is provided in + # some (valid) form. + parser.values.cache_dir = False + + +no_cache: Callable[..., Option] = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="callback", + callback=_handle_no_cache_dir, + help="Disable the cache.", +) + +no_deps: Callable[..., Option] = partial( + Option, + "--no-deps", + "--no-dependencies", + dest="ignore_dependencies", + action="store_true", + default=False, + help="Don't install package dependencies.", +) + +ignore_requires_python: Callable[..., Option] = partial( + Option, + "--ignore-requires-python", + dest="ignore_requires_python", + action="store_true", + help="Ignore the Requires-Python information.", +) + +no_build_isolation: Callable[..., Option] = partial( + Option, + "--no-build-isolation", + dest="build_isolation", + action="store_false", + default=True, + help="Disable isolation when building a modern source distribution. " + "Build dependencies specified by PEP 518 must be already installed " + "if this option is used.", +) + + +def _handle_no_use_pep517( + option: Option, opt: str, value: str, parser: OptionParser +) -> None: + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + +use_pep517: Any = partial( + Option, + "--use-pep517", + dest="use_pep517", + action="store_true", + default=None, + help="Use PEP 517 for building source distributions " + "(use --no-use-pep517 to force legacy behaviour).", +) + +no_use_pep517: Any = partial( + Option, + "--no-use-pep517", + dest="use_pep517", + action="callback", + callback=_handle_no_use_pep517, + default=None, + help=SUPPRESS_HELP, +) + +install_options: Callable[..., Option] = partial( + Option, + "--install-option", + dest="install_options", + action="append", + metavar="options", + help="Extra arguments to be supplied to the setup.py install " + 'command (use like --install-option="--install-scripts=/usr/local/' + 'bin"). Use multiple --install-option options to pass multiple ' + "options to setup.py install. 
If you are using an option with a " + "directory path, be sure to use an absolute path.", +) + +build_options: Callable[..., Option] = partial( + Option, + "--build-option", + dest="build_options", + metavar="options", + action="append", + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", +) + +global_options: Callable[..., Option] = partial( + Option, + "--global-option", + dest="global_options", + action="append", + metavar="options", + help="Extra global options to be supplied to the setup.py " + "call before the install or bdist_wheel command.", +) + +no_clean: Callable[..., Option] = partial( + Option, + "--no-clean", + action="store_true", + default=False, + help="Don't clean up build directories.", +) + +pre: Callable[..., Option] = partial( + Option, + "--pre", + action="store_true", + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) + +disable_pip_version_check: Callable[..., Option] = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=True, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) + + +def _handle_merge_hash( + option: Option, opt_str: str, value: str, parser: OptionParser +) -> None: + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(":", 1) + except ValueError: + parser.error( + "Arguments to {} must be a hash name " # noqa + "followed by a value, like --hash=sha256:" + "abcde...".format(opt_str) + ) + if algo not in STRONG_HASHES: + parser.error( + "Allowed hash algorithms for {} are {}.".format( # noqa + opt_str, ", ".join(STRONG_HASHES) + ) + ) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash: Callable[..., Option] = partial( + Option, + "--hash", + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest="hashes", + action="callback", + callback=_handle_merge_hash, + type="string", + help="Verify that the package's archive matches this " + "hash before installing. Example: --hash=sha256:abcdef...", +) + + +require_hashes: Callable[..., Option] = partial( + Option, + "--require-hashes", + dest="require_hashes", + action="store_true", + default=False, + help="Require a hash to check each requirement against, for " + "repeatable installs.
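`_handle_merge_hash` above accumulates repeated `--hash` options into a dict keyed by algorithm; a standalone sketch of that merge (digests shortened to placeholders, not real hashes):

hashes = {}
for value in ("sha256:abc123", "sha256:def456", "sha512:0a0b"):
    algo, digest = value.split(":", 1)
    hashes.setdefault(algo, []).append(digest)
print(hashes)  # {'sha256': ['abc123', 'def456'], 'sha512': ['0a0b']}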
This option is implied when any package in a " + "requirements file has a --hash option.", +) + + +list_path: Callable[..., Option] = partial( + PipOption, + "--path", + dest="path", + type="path", + action="append", + help="Restrict to the specified installation path for listing " + "packages (can be used multiple times).", +) + + +def check_list_path_option(options: Values) -> None: + if options.path and (options.user or options.local): + raise CommandError("Cannot combine '--path' with '--user' or '--local'") + + +list_exclude: Callable[..., Option] = partial( + PipOption, + "--exclude", + dest="excludes", + action="append", + metavar="package", + type="package_name", + help="Exclude specified package from the output", +) + + +no_python_version_warning: Callable[..., Option] = partial( + Option, + "--no-python-version-warning", + dest="no_python_version_warning", + action="store_true", + default=False, + help="Silence deprecation warnings for upcoming unsupported Pythons.", +) + + +use_new_feature: Callable[..., Option] = partial( + Option, + "--use-feature", + dest="features_enabled", + metavar="feature", + action="append", + default=[], + choices=["2020-resolver", "fast-deps", "in-tree-build"], + help="Enable new functionality, that may be backward incompatible.", +) + +use_deprecated_feature: Callable[..., Option] = partial( + Option, + "--use-deprecated", + dest="deprecated_features_enabled", + metavar="feature", + action="append", + default=[], + choices=[ + "legacy-resolver", + "out-of-tree-build", + "backtrack-on-build-failures", + "html5lib", + ], + help=("Enable deprecated functionality, that will be removed in the future."), +) + + +########## +# groups # +########## + +general_group: Dict[str, Any] = { + "name": "General Options", + "options": [ + help_, + debug_mode, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + use_new_feature, + use_deprecated_feature, + ], +} + +index_group: Dict[str, Any] = { + "name": "Package Index Options", + "options": [ + index_url, + extra_index_url, + no_index, + find_links, + ], +} diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/command_context.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 000000000..ed6832237 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,27 @@ +from contextlib import ExitStack, contextmanager +from typing import ContextManager, Iterator, TypeVar + +_T = TypeVar("_T", covariant=True) + + +class CommandContextMixIn: + def __init__(self) -> None: + super().__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self) -> Iterator[None]: + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider: ContextManager[_T]) -> _T: + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/main.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/main.py new file mode 100644 index 000000000..0e3122154 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/main.py @@ -0,0 
+1,70 @@ +"""Primary application entrypoint. +""" +import locale +import logging +import os +import sys +from typing import List, Optional + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. + + +def main(args: Optional[List[str]] = None) -> int: + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write(f"ERROR: {exc}") + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, "") + except locale.Error as e: + # setlocale can apparently crash if locales are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 000000000..3666ab04c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,87 @@ +"""A single place for constructing and exposing the main parser +""" + +import os +import sys +from typing import List, Tuple + +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser() -> ConfigOptionParser: + """Creates and returns the main parser for pip's CLI""" + + parser = ConfigOptionParser( + usage="\n%prog [options]", + add_help_option=False, + formatter=UpdatingDefaultsHelpFormatter(), + name="global", + prog=get_prog(), + ) +
parser.disable_interspersed_args() + + parser.version = get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [""] + [ + f"{name:27} {command_info.summary}" + for name, command_info in commands_dict.items() + ] + parser.description = "\n".join(description) + + return parser + + +def parse_command(args: List[str]) -> Tuple[str, List[str]]: + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout=5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == "help" and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/parser.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 000000000..a1c99a8cb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,292 @@ +"""Base option parser setup""" + +import logging +import optparse +import shutil +import sys +import textwrap +from contextlib import suppress +from typing import Any, Dict, Iterator, List, Tuple + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.misc import redact_auth_from_url, strtobool + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # help position must be aligned with __init__.parseopts.description + kwargs["max_help_position"] = 30 + kwargs["indent_increment"] = 1 + kwargs["width"] = shutil.get_terminal_size()[0] - 2 + super().__init__(*args, **kwargs) + + def format_option_strings(self, option: optparse.Option) -> str: + return self._format_option_strings(option) + + def _format_option_strings( + self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", " + ) -> str: + """ + Return a comma-separated list of option strings and metavars.
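The splitting behaviour documented in `parse_command` above comes straight from optparse; a standalone sketch:

import optparse

parser = optparse.OptionParser()
parser.add_option("--timeout", type="float")
parser.disable_interspersed_args()  # stop parsing at the first positional arg
opts, rest = parser.parse_args(["--timeout=5", "install", "--user", "INITools"])
print(opts.timeout, rest)  # 5.0 ['install', '--user', 'INITools']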
+ + :param option: tuple of (short opt, long opt), e.g.: ('-f', '--format') + :param mvarfmt: metavar format string + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + assert option.dest is not None + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt.format(metavar.lower())) + + return "".join(opts) + + def format_heading(self, heading: str) -> str: + if heading == "Options": + return "" + return heading + ":\n" + + def format_usage(self, usage: str) -> str: + """ + Ensure there is only one newline between usage and the first heading + if there is no description. + """ + msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  ")) + return msg + + def format_description(self, description: str) -> str: + # leave full control over description to us + if description: + if hasattr(self.parser, "main"): + label = "Commands" + else: + label = "Description" + # some doc strings have initial newlines, some don't + description = description.lstrip("\n") + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), "  ") + description = f"{label}:\n{description}\n" + return description + else: + return "" + + def format_epilog(self, epilog: str) -> str: + # leave full control over epilog to us + if epilog: + return epilog + else: + return "" + + def indent_lines(self, text: str, indent: str) -> str: + new_lines = [indent + line for line in text.split("\n")] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This updates the defaults before expanding them, allowing + them to show up correctly in the help listing.
+ + Also redacts auth from URL-type options. + """ + + def expand_default(self, option: optparse.Option) -> str: + default_values = None + if self.parser is not None: + assert isinstance(self.parser, ConfigOptionParser) + self.parser._update_defaults(self.parser.defaults) + assert option.dest is not None + default_values = self.parser.defaults.get(option.dest) + help_text = super().expand_default(option) + + if default_values and option.metavar == "URL": + if isinstance(default_values, str): + default_values = [default_values] + + # If it's not a list, we should abort and just return the help text + if not isinstance(default_values, list): + default_values = [] + + for val in default_values: + help_text = help_text.replace(val, redact_auth_from_url(val)) + + return help_text + +
+class CustomOptionParser(optparse.OptionParser): + def insert_option_group( + self, idx: int, *args: Any, **kwargs: Any + ) -> optparse.OptionGroup: + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self) -> List[optparse.Option]: + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + +
+class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environment variables""" + + def __init__( + self, + *args: Any, + name: str, + isolated: bool = False, + **kwargs: Any, + ) -> None: + self.name = name + self.config = Configuration(isolated) + + assert self.name + super().__init__(*args, **kwargs) + + def check_default(self, option: optparse.Option, key: str, val: Any) -> Any: + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print(f"An error occurred during configuration: {exc}") + sys.exit(3) + + def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]: + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items: Dict[str, List[Tuple[str, Any]]] = { + name: [] for name in override_order + } + for section_key, val in self.config.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as its value is empty.", + section_key, + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + for key, val in section_items[section]: + yield key, val + + def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option("--" + key) + + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands.
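# Illustration: the key/value pairs consumed by this loop arrive already
# ordered by _get_ordered_configuration_items() above: the "global" section
# first, then the command's own section, then ":env:" values, so later
# sources overwrite earlier ones when written into `defaults`. A minimal,
# self-contained sketch of that precedence rule; the function name and demo
# data below are illustrative, not pip's API:

def ordered_items(config, command_name):
    override_order = ["global", command_name, ":env:"]
    grouped = {section: [] for section in override_order}
    for section_key, value in config.items():
        if not value:  # empty values are skipped, as in the code above
            continue
        section, key = section_key.split(".", 1)
        if section in grouped:
            grouped[section].append((key, value))
    for section in override_order:  # later sections win on conflict
        yield from grouped[section]

demo = {"global.timeout": "15", "install.timeout": "60", ":env:.retries": "5"}
# The install-section timeout is yielded after the global one, so it wins:
assert [kv for kv in ordered_items(demo, "install") if kv[0] == "timeout"][-1] == ("timeout", "60")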
+ if option is None: + continue + + assert option.dest is not None + + if option.action in ("store_true", "store_false"): + try: + val = strtobool(val) + except ValueError: + self.error( + "{} is not a valid value for {} option, " # noqa + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead.".format(val, key) + ) + elif option.action == "count": + with suppress(ValueError): + val = strtobool(val) + with suppress(ValueError): + val = int(val) + if not isinstance(val, int) or val < 0: + self.error( + "{} is not a valid value for {} option, " # noqa + "please instead specify either a non-negative integer " + "or a boolean value like yes/no or false/true " + "which is equivalent to 1/0.".format(val, key) + ) + elif option.action == "append": + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == "callback": + assert option.callback is not None + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self) -> optparse.Values: + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + assert option.dest is not None + default = defaults.get(option.dest) + if isinstance(default, str): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg: str) -> None: + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, f"{msg}\n") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py new file mode 100644 index 000000000..ffa1964fc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py @@ -0,0 +1,321 @@ +import functools +import itertools +import sys +from signal import SIGINT, default_int_handler, signal +from typing import Any, Callable, Iterator, Optional, Tuple + +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner +from pip._vendor.rich.progress import ( + BarColumn, + DownloadColumn, + FileSizeColumn, + Progress, + ProgressColumn, + SpinnerColumn, + TextColumn, + TimeElapsedColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. 
+except Exception: + colorama = None + +DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]] + + +def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar: + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", ""), + getattr(preferred, "fill", ""), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + "".join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar: Any = _select_progress_class(IncrementalBar, Bar) + + +class InterruptibleMixin: + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """ + Save the original SIGINT handler for later. + """ + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self) -> None: + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super().finish() # type: ignore + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): # type: ignore + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. 
+ """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + def update(self) -> None: + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = ("\U0001F539", "\U0001F537", "\U0001F535") + + +class DownloadProgressMixin: + def __init__(self, *args: Any, **kwargs: Any) -> None: + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + self.message: str = (" " * (get_indentation() + 2)) + self.message + + @property + def downloaded(self) -> str: + return format_size(self.index) # type: ignore + + @property + def download_speed(self) -> str: + # Avoid zero division errors... + if self.avg == 0.0: # type: ignore + return "..." + return format_size(1 / self.avg) + "/s" # type: ignore + + @property + def pretty_eta(self) -> str: + if self.eta: # type: ignore + return f"eta {self.eta_td}" # type: ignore + return "" + + def iter(self, it): # type: ignore + for x in it: + yield x + # B305 is incorrectly raised here + # https://github.com/PyCQA/flake8-bugbear/issues/59 + self.next(len(x)) # noqa: B305 + self.finish() + + +class WindowsMixin: + def __init__(self, *args: Any, **kwargs: Any) -> None: + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: # type: ignore + self.hide_cursor = False + + # https://github.com/python/mypy/issues/5887 + super().__init__(*args, **kwargs) # type: ignore + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) # type: ignore + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. 
+ self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): + pass + + +class DownloadBar(BaseDownloadProgressBar, Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar): + pass + + +class DownloadProgressSpinner( + WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner +): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self) -> str: + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self) -> None: + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = "".join( + [ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ] + ) + + self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner), +} + + +def _legacy_progress_bar( + progress_bar: str, max: Optional[int] +) -> DownloadProgressRenderer: + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter # type: ignore + else: + return BAR_TYPES[progress_bar][0](max=max).iter + + +# +# Modern replacement, for our legacy progress bars. +# +def _rich_progress_bar( + iterable: Iterator[bytes], + *, + bar_type: str, + size: int, +) -> Iterator[bytes]: + assert bar_type == "on", "This should only be used in the default mode." + + if not size: + total = float("inf") + columns: Tuple[ProgressColumn, ...] = ( + TextColumn("[progress.description]{task.description}"), + SpinnerColumn("line", speed=1.5), + FileSizeColumn(), + TransferSpeedColumn(), + TimeElapsedColumn(), + ) + else: + total = size + columns = ( + TextColumn("[progress.description]{task.description}"), + BarColumn(), + DownloadColumn(), + TransferSpeedColumn(), + TextColumn("eta"), + TimeRemainingColumn(), + ) + + progress = Progress(*columns, refresh_per_second=30) + task_id = progress.add_task(" " * (get_indentation() + 2), total=total) + with progress: + for chunk in iterable: + yield chunk + progress.update(task_id, advance=len(chunk)) + + +def get_download_progress_renderer( + *, bar_type: str, size: Optional[int] = None +) -> DownloadProgressRenderer: + """Get an object that can be used to render the download progress. + + Returns a callable, that takes an iterable to "wrap". 
+ """ + if bar_type == "on": + return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size) + elif bar_type == "off": + return iter # no-op, when passed an iterator + else: + return _legacy_progress_bar(bar_type, size) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 000000000..5d4d1f0f4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,506 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. +""" + +import logging +import os +import sys +from functools import partial +from optparse import Values +from typing import Any, List, Optional, Tuple + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_parsed_requirement, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.resolution.base import BaseResolver +from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.temp_dir import ( + TempDirectory, + TempDirectoryTypeRegistry, + tempdir_kinds, +) +from pip._internal.utils.virtualenv import running_under_virtualenv + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). 
+ """ + + def __init__(self) -> None: + super().__init__() + self._session: Optional[PipSession] = None + + @classmethod + def _get_index_urls(cls, options: Values) -> Optional[List[str]]: + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options: Values) -> PipSession: + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session( + self, + options: Values, + retries: Optional[int] = None, + timeout: Optional[int] = None, + ) -> PipSession: + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = timeout if timeout is not None else options.timeout + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options: Values) -> None: + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, "no_index") + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, retries=0, timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +KEEPABLE_TEMPDIR_TYPES = [ + tempdir_kinds.BUILD_ENV, + tempdir_kinds.EPHEM_WHEEL_CACHE, + tempdir_kinds.REQ_BUILD, +] + + +def warn_if_run_as_root() -> None: + """Output a warning for sudo users on Unix. + + In a virtual environment, sudo pip still writes to virtualenv. + On Windows, users may run pip as Administrator without issues. + This warning only applies to Unix root users outside of virtualenv. + """ + if running_under_virtualenv(): + return + if not hasattr(os, "getuid"): + return + # On Windows, there are no "system managed" Python packages. Installing as + # Administrator via pip is the correct way of updating system environments. 
+ # + # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform + # checks: https://mypy.readthedocs.io/en/stable/common_issues.html + if sys.platform == "win32" or sys.platform == "cygwin": + return + + if os.getuid() != 0: + return + + logger.warning( + "Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager. " + "It is recommended to use a virtual environment instead: " + "https://pip.pypa.io/warnings/venv" + ) + + +def with_cleanup(func: Any) -> Any: + """Decorator for common logic related to managing temporary + directories. + """ + + def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: + for t in KEEPABLE_TEMPDIR_TYPES: + registry.set_delete(t, False) + + def wrapper( + self: RequirementCommand, options: Values, args: List[Any] + ) -> Optional[int]: + assert self.tempdir_registry is not None + if options.no_clean: + configure_tempdir_registry(self.tempdir_registry) + + try: + return func(self, options, args) + except PreviousBuildDirError: + # This kind of conflict can occur when the user passes an explicit + # build directory with a pre-existing folder. In that case we do + # not want to accidentally remove it. + configure_tempdir_registry(self.tempdir_registry) + raise + + return wrapper + + +class RequirementCommand(IndexGroupCommand): + def __init__(self, *args: Any, **kw: Any) -> None: + super().__init__(*args, **kw) + + self.cmd_opts.add_option(cmdoptions.no_clean()) + + @staticmethod + def determine_resolver_variant(options: Values) -> str: + """Determines which resolver should be used, based on the given options.""" + if "legacy-resolver" in options.deprecated_features_enabled: + return "legacy" + + return "2020-resolver" + + @staticmethod + def determine_build_failure_suppression(options: Values) -> bool: + """Determines whether build failures should be suppressed and backtracked on.""" + if "backtrack-on-build-failures" not in options.deprecated_features_enabled: + return False + + if "legacy-resolver" in options.deprecated_features_enabled: + raise CommandError("Cannot backtrack with legacy resolver.") + + deprecated( + reason=( + "Backtracking on build failures can mask issues related to how " + "a package generates metadata or builds a wheel. This flag will " + "be removed in pip 22.2." + ), + gone_in=None, + replacement=( + "avoiding known-bad versions by explicitly telling pip to ignore them " + "(either directly as requirements, or via a constraints file)" + ), + feature_flag=None, + issue=10655, + ) + return True + + @classmethod + def make_requirement_preparer( + cls, + temp_build_dir: TempDirectory, + options: Values, + req_tracker: RequirementTracker, + session: PipSession, + finder: PackageFinder, + use_user_site: bool, + download_dir: Optional[str] = None, + verbosity: int = 0, + ) -> RequirementPreparer: + """ + Create a RequirementPreparer instance for the given parameters. + """ + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + resolver_variant = cls.determine_resolver_variant(options) + if resolver_variant == "2020-resolver": + lazy_wheel = "fast-deps" in options.features_enabled + if lazy_wheel: + logger.warning( + "pip is using lazily downloaded wheels using HTTP " + "range requests to obtain dependency information. " + "This experimental feature is enabled through " + "--use-feature=fast-deps and it is not ready for " + "production." 
+ ) + else: + lazy_wheel = False + if "fast-deps" in options.features_enabled: + logger.warning( + "fast-deps has no effect when used with the legacy resolver." + ) + + in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled + if "in-tree-build" in options.features_enabled: + deprecated( + reason="In-tree builds are now the default.", + replacement="to remove the --use-feature=in-tree-build flag", + gone_in="22.1", + ) + if "out-of-tree-build" in options.deprecated_features_enabled: + deprecated( + reason="Out-of-tree builds are deprecated.", + replacement=None, + gone_in="22.1", + ) + + if options.progress_bar not in {"on", "off"}: + deprecated( + reason="Custom progress bar styles are deprecated", + replacement="to use the default progress bar style.", + gone_in="22.1", + ) + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + session=session, + progress_bar=options.progress_bar, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + lazy_wheel=lazy_wheel, + verbosity=verbosity, + in_tree_build=in_tree_build, + ) + + @classmethod + def make_resolver( + cls, + preparer: RequirementPreparer, + finder: PackageFinder, + options: Values, + wheel_cache: Optional[WheelCache] = None, + use_user_site: bool = False, + ignore_installed: bool = True, + ignore_requires_python: bool = False, + force_reinstall: bool = False, + upgrade_strategy: str = "to-satisfy-only", + use_pep517: Optional[bool] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> BaseResolver: + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + use_pep517=use_pep517, + ) + suppress_build_failures = cls.determine_build_failure_suppression(options) + resolver_variant = cls.determine_resolver_variant(options) + # The long import name and duplicated invocation is needed to convince + # Mypy into correctly typechecking. Otherwise it would complain the + # "Resolver" class being redefined. + if resolver_variant == "2020-resolver": + import pip._internal.resolution.resolvelib.resolver + + return pip._internal.resolution.resolvelib.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + suppress_build_failures=suppress_build_failures, + ) + import pip._internal.resolution.legacy.resolver + + return pip._internal.resolution.legacy.resolver.Resolver( + preparer=preparer, + finder=finder, + wheel_cache=wheel_cache, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def get_requirements( + self, + args: List[str], + options: Values, + finder: PackageFinder, + session: PipSession, + ) -> List[InstallRequirement]: + """ + Parse command-line arguments into the corresponding requirements. 
+ """ + requirements: List[InstallRequirement] = [] + for filename in options.constraints: + for parsed_req in parse_requirements( + filename, + constraint=True, + finder=finder, + options=options, + session=session, + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + user_supplied=False, + ) + requirements.append(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, + None, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + user_supplied=True, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + ) + requirements.append(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for parsed_req in parse_requirements( + filename, finder=finder, options=options, session=session + ): + req_to_add = install_req_from_parsed_requirement( + parsed_req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + user_supplied=True, + ) + requirements.append(req_to_add) + + # If any requirement has hash options, enable hash checking. + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {"name": self.name} + if options.find_links: + raise CommandError( + "You must give at least one requirement to {name} " + '(maybe you meant "pip {name} {links}"?)'.format( + **dict(opts, links=" ".join(options.find_links)) + ) + ) + else: + raise CommandError( + "You must give at least one requirement to {name} " + '(see "pip help {name}")'.format(**opts) + ) + + return requirements + + @staticmethod + def trace_basic_info(finder: PackageFinder) -> None: + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
+ """ + link_collector = LinkCollector.create(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py new file mode 100644 index 000000000..1e313e109 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py @@ -0,0 +1,157 @@ +import contextlib +import itertools +import logging +import sys +import time +from typing import IO, Iterator + +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation + +logger = logging.getLogger(__name__) + + +class SpinnerInterface: + def spin(self) -> None: + raise NotImplementedError() + + def finish(self, final_status: str) -> None: + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__( + self, + message: str, + file: IO[str] = None, + spin_chars: str = "-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds: float = 0.125, + ): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status: str) -> None: + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. 
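# Illustration: the InteractiveSpinner above repaints at most once every
# 0.125 s, and the NonInteractiveSpinner below logs a keep-alive line at most
# once every 60 s; both delegate the throttling to the RateLimiter defined
# further down in this file. A minimal, self-contained sketch of that idea,
# with an illustrative class name and interval:

import time


class SketchRateLimiter:
    def __init__(self, min_interval: float) -> None:
        self._min_interval = min_interval
        self._last_update = 0.0

    def ready(self) -> bool:
        # Only allow an update through once enough wall-clock time has passed.
        return time.time() - self._last_update >= self._min_interval

    def reset(self) -> None:
        self._last_update = time.time()


limiter = SketchRateLimiter(min_interval=0.5)
emitted = 0
for _ in range(1000):  # 1000 candidate updates in a tight loop...
    if limiter.ready():
        emitted += 1  # ...but typically only the first one gets through
        limiter.reset()
print(f"{emitted} of 1000 updates actually rendered")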
+class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status: str) -> None: + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self) -> None: + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status: str) -> None: + if self._finished: + return + self._update(f"finished with status '{final_status}'") + self._finished = True + + +class RateLimiter: + def __init__(self, min_update_interval_seconds: float) -> None: + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update: float = 0 + + def ready(self) -> bool: + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self) -> None: + self._last_update = time.time() + + +@contextlib.contextmanager +def open_spinner(message: str) -> Iterator[SpinnerInterface]: + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner: SpinnerInterface = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") + + +@contextlib.contextmanager +def hidden_cursor(file: IO[str]) -> Iterator[None]: + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py b/myenv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 000000000..5e29502cd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,6 @@ +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 000000000..c72f24f30 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,127 @@ +""" +Package containing all pip commands +""" + +import importlib +from collections import namedtuple +from typing import Any, Dict, Optional + +from pip._internal.cli.base_command import Command + +CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary") + +# This dictionary does a bunch of heavy lifting for help output: +# - Enables avoiding additional (costly) imports for presenting `--help`. +# - The ordering matters for help display. +# +# Even though the module path starts with the same "pip._internal.commands" +# prefix, the full path makes testing easier (specifically when modifying +# `commands_dict` in test setup / teardown). +commands_dict: Dict[str, CommandInfo] = { + "install": CommandInfo( + "pip._internal.commands.install", + "InstallCommand", + "Install packages.", + ), + "download": CommandInfo( + "pip._internal.commands.download", + "DownloadCommand", + "Download packages.", + ), + "uninstall": CommandInfo( + "pip._internal.commands.uninstall", + "UninstallCommand", + "Uninstall packages.", + ), + "freeze": CommandInfo( + "pip._internal.commands.freeze", + "FreezeCommand", + "Output installed packages in requirements format.", + ), + "list": CommandInfo( + "pip._internal.commands.list", + "ListCommand", + "List installed packages.", + ), + "show": CommandInfo( + "pip._internal.commands.show", + "ShowCommand", + "Show information about installed packages.", + ), + "check": CommandInfo( + "pip._internal.commands.check", + "CheckCommand", + "Verify installed packages have compatible dependencies.", + ), + "config": CommandInfo( + "pip._internal.commands.configuration", + "ConfigurationCommand", + "Manage local and global configuration.", + ), + "search": CommandInfo( + "pip._internal.commands.search", + "SearchCommand", + "Search PyPI for packages.", + ), + "cache": CommandInfo( + "pip._internal.commands.cache", + "CacheCommand", + "Inspect and manage pip's wheel cache.", + ), + "index": CommandInfo( + "pip._internal.commands.index", + "IndexCommand", + "Inspect information available from package indexes.", + ), + "wheel": CommandInfo( + "pip._internal.commands.wheel", + "WheelCommand", + "Build wheels from your requirements.", + ), + "hash": CommandInfo( + "pip._internal.commands.hash", + "HashCommand", + "Compute hashes of package archives.", + ), + "completion": CommandInfo( + "pip._internal.commands.completion", + "CompletionCommand", + "A helper command used for command completion.", + ), + "debug": CommandInfo( + "pip._internal.commands.debug", + "DebugCommand", + "Show information useful for debugging.", + ), + 
"help": CommandInfo( + "pip._internal.commands.help", + "HelpCommand", + "Show help for commands.", + ), +} + + +def create_command(name: str, **kwargs: Any) -> Command: + """ + Create an instance of the Command class with the given name. + """ + module_path, class_name, summary = commands_dict[name] + module = importlib.import_module(module_path) + command_class = getattr(module, class_name) + command = command_class(name=name, summary=summary, **kwargs) + + return command + + +def get_similar_commands(name: str) -> Optional[str]: + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..338e7e8d6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/cache.cpython-310.pyc new file mode 100644 index 000000000..c43368bb6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc new file mode 100644 index 000000000..f590e4b1e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc new file mode 100644 index 000000000..f535da3b3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc new file mode 100644 index 000000000..e405fab23 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc new file mode 100644 index 000000000..1c8988e03 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc new file mode 100644 index 000000000..3baafb76e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc new file mode 100644 index 000000000..bfb3154c5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc new file mode 100644 index 000000000..d1b02d6ad Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc new file mode 100644 index 000000000..bc2091628 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc new file mode 100644 index 000000000..bae60e0a4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc new file mode 100644 index 000000000..97ee6b5ff Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/list.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/list.cpython-310.pyc new file mode 100644 index 000000000..3b6989d44 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/list.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc new file mode 100644 index 000000000..03fae82a5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc new file mode 100644 index 000000000..c59825cbf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc new file mode 100644 index 000000000..d44e1a481 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc new file mode 100644 index 000000000..a6a120646 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/cache.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/cache.py new file mode 100644 index 000000000..f1a489d32 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/cache.py @@ -0,0 +1,223 @@
+import os +import textwrap +from optparse import Values +from typing import Any, List + +import pip._internal.utils.filesystem as filesystem +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, PipError +from pip._internal.utils.logging import getLogger + +logger = getLogger(__name__) + +
+class CacheCommand(Command): + """ + Inspect and manage pip's wheel cache. + + Subcommands: + + - dir: Show the cache directory. + - info: Show information about the cache. + - list: List filenames of packages stored in the cache. + - remove: Remove one or more packages from the cache. + - purge: Remove all items from the cache. + + ``<pattern>`` can be a glob expression or a package name. + """ + + ignore_require_venv = True + usage = """ + %prog dir + %prog info + %prog list [<pattern>] [--format=[human, abspath]] + %prog remove <pattern> + %prog purge + """ + + def add_options(self) -> None: + + self.cmd_opts.add_option( + "--format", + action="store", + dest="list_format", + default="human", + choices=("human", "abspath"), + help="Select the output format among: human (default) or abspath", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "dir": self.get_cache_dir, + "info": self.get_cache_info, + "list": self.list_cache_items, + "remove": self.remove_cache_items, + "purge": self.purge_cache, + } + + if not options.cache_dir: + logger.error("pip cache commands cannot function since cache is disabled.") + return ERROR + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers.
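# Illustration: run() above dispatches through a plain dict of bound methods,
# and the try/except just below converts the PipError raised by any handler
# into an ERROR exit code, so the handlers themselves stay free of exit-code
# logic. A condensed, self-contained sketch of the same pattern; the error
# type, handlers, and messages are illustrative stand-ins, not pip's API:

from typing import Callable, Dict, List

SUCCESS, ERROR = 0, 1


class SketchError(Exception):
    """Stands in for PipError in this sketch."""


def _purge(rest: List[str]) -> None:
    raise SketchError("cache is disabled in this sketch")


def run_sketch(args: List[str]) -> int:
    handlers: Dict[str, Callable[[List[str]], None]] = {
        "dir": lambda rest: print("<cache dir would be printed here>"),
        "purge": _purge,
    }
    if not args or args[0] not in handlers:
        print("Need an action (%s) to perform." % ", ".join(sorted(handlers)))
        return ERROR
    try:
        handlers[args[0]](args[1:])  # handlers raise on failure
    except SketchError as exc:
        print(exc.args[0])
        return ERROR
    return SUCCESS


assert run_sketch(["dir"]) == SUCCESS
assert run_sketch(["purge"]) == ERROR  # the exception became an exit code
assert run_sketch(["bogus"]) == ERROR  # unknown actions are rejected up front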
+ try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def get_cache_dir(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + logger.info(options.cache_dir) + + def get_cache_info(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + num_http_files = len(self._find_http_files(options)) + num_packages = len(self._find_wheels(options, "*")) + + http_cache_location = self._cache_dir(options, "http") + wheels_cache_location = self._cache_dir(options, "wheels") + http_cache_size = filesystem.format_directory_size(http_cache_location) + wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) + + message = ( + textwrap.dedent( + """ + Package index page cache location: {http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} + Wheels location: {wheels_cache_location} + Wheels size: {wheels_cache_size} + Number of wheels: {package_count} + """ + ) + .format( + http_cache_location=http_cache_location, + http_cache_size=http_cache_size, + num_http_files=num_http_files, + wheels_cache_location=wheels_cache_location, + package_count=num_packages, + wheels_cache_size=wheels_cache_size, + ) + .strip() + ) + + logger.info(message) + + def list_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if args: + pattern = args[0] + else: + pattern = "*" + + files = self._find_wheels(options, pattern) + if options.list_format == "human": + self.format_for_human(files) + else: + self.format_for_abspath(files) + + def format_for_human(self, files: List[str]) -> None: + if not files: + logger.info("Nothing cached.") + return + + results = [] + for filename in files: + wheel = os.path.basename(filename) + size = filesystem.format_file_size(filename) + results.append(f" - {wheel} ({size})") + logger.info("Cache contents:\n") + logger.info("\n".join(sorted(results))) + + def format_for_abspath(self, files: List[str]) -> None: + if not files: + return + + results = [] + for filename in files: + results.append(filename) + + logger.info("\n".join(sorted(results))) + + def remove_cache_items(self, options: Values, args: List[Any]) -> None: + if len(args) > 1: + raise CommandError("Too many arguments") + + if not args: + raise CommandError("Please provide a pattern") + + files = self._find_wheels(options, args[0]) + + no_matching_msg = "No matching packages" + if args[0] == "*": + # Only fetch http files if no specific pattern given + files += self._find_http_files(options) + else: + # Add the pattern to the log message + no_matching_msg += ' for pattern "{}"'.format(args[0]) + + if not files: + logger.warning(no_matching_msg) + + for filename in files: + os.unlink(filename) + logger.verbose("Removed %s", filename) + logger.info("Files removed: %s", len(files)) + + def purge_cache(self, options: Values, args: List[Any]) -> None: + if args: + raise CommandError("Too many arguments") + + return self.remove_cache_items(options, ["*"]) + + def _cache_dir(self, options: Values, subdir: str) -> str: + return os.path.join(options.cache_dir, subdir) + + def _find_http_files(self, options: Values) -> List[str]: + http_dir = self._cache_dir(options, "http") + return filesystem.find_files(http_dir, "*") + + def _find_wheels(self, options: Values, pattern: str) -> List[str]: + wheel_dir = 
self._cache_dir(options, "wheels") + + # The wheel filename format, as specified in PEP 427, is: + # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl + # + # Additionally, non-alphanumeric values in the distribution are + # normalized to underscores (_), meaning hyphens can never occur + # before `-{version}`. + # + # Given that information: + # - If the pattern we're given contains a hyphen (-), the user is + # providing at least the version. Thus, we can just append `*.whl` + # to match the rest of it. + # - If the pattern we're given doesn't contain a hyphen (-), the + # user is only providing the name. Thus, we append `-*.whl` to + # match the hyphen before the version, followed by anything else. + # + # PEP 427: https://www.python.org/dev/peps/pep-0427/ + pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl") + + return filesystem.find_files(wheel_dir, pattern) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/check.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/check.py new file mode 100644 index 000000000..3864220b2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/check.py @@ -0,0 +1,53 @@ +import logging +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.operations.check import ( + check_package_set, + create_package_set_from_installed, +) +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class CheckCommand(Command): + """Verify installed packages have compatible dependencies.""" + + usage = """ + %prog [options]""" + + def run(self, options: Values, args: List[str]) -> int: + + package_set, parsing_probs = create_package_set_from_installed() + missing, conflicting = check_package_set(package_set) + + for project_name in missing: + version = package_set[project_name].version + for dependency in missing[project_name]: + write_output( + "%s %s requires %s, which is not installed.", + project_name, + version, + dependency[0], + ) + + for project_name in conflicting: + version = package_set[project_name].version + for dep_name, dep_version, req in conflicting[project_name]: + write_output( + "%s %s has requirement %s, but you have %s %s.", + project_name, + version, + req, + dep_name, + dep_version, + ) + + if missing or conflicting or parsing_probs: + return ERROR + else: + write_output("No broken requirements found.") + return SUCCESS diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/completion.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/completion.py new file mode 100644 index 000000000..c0fb4caf8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/completion.py @@ -0,0 +1,96 @@ +import sys +import textwrap +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip {shell} completion start{script}# pip {shell} completion end +""" + +COMPLETION_SCRIPTS = { + "bash": """ + _pip_completion() + {{ + COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + }} + complete -o default -F _pip_completion {prog} + """, + "zsh": """ + function _pip_completion {{ + local words cword + read -Ac words + read -cn cword + 
reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + }} + compctl -K _pip_completion {prog} + """, + "fish": """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c {prog} + """, +} + +
+class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--bash", + "-b", + action="store_const", + const="bash", + dest="shell", + help="Emit completion code for bash", + ) + self.cmd_opts.add_option( + "--zsh", + "-z", + action="store_const", + const="zsh", + dest="shell", + help="Emit completion code for zsh", + ) + self.cmd_opts.add_option( + "--fish", + "-f", + action="store_const", + const="fish", + dest="shell", + help="Emit completion code for fish", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ["--" + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog()) + ) + print(BASE_COMPLETION.format(script=script, shell=options.shell)) + return SUCCESS + else: + sys.stderr.write( + "ERROR: You must pass {}\n".format(" or ".join(shell_options)) + ) + return SUCCESS diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/configuration.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 000000000..c6c74ed50 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,266 @@
+import logging +import os +import subprocess +from optparse import Values +from typing import Any, List, Optional + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.configuration import ( + Configuration, + Kind, + get_configuration_files, + kinds, +) +from pip._internal.exceptions import PipError +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_prog, write_output + +logger = logging.getLogger(__name__) + +
+class ConfigurationCommand(Command): + """ + Manage local and global configuration. + + Subcommands: + + - list: List the active configuration (or from the file specified) + - edit: Edit the configuration file in an editor + - get: Get the value associated with name + - set: Set the name=value + - unset: Unset the value associated with name + - debug: List the configuration files and values defined under them + + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen to the user file by + default. + """ + + ignore_require_venv = True + usage = """ + %prog [<file-option>] list + %prog [<file-option>] [--editor <editor-path>] edit + + %prog [<file-option>] get name + %prog [<file-option>] set name value + %prog [<file-option>] unset name + %prog [<file-option>] debug + """ + + def add_options(self) -> None: + self.cmd_opts.add_option( + "--editor", + dest="editor", + action="store", + default=None, + help=( + "Editor to use to edit the file. 
Uses VISUAL or EDITOR " + "environment variables if not provided." + ), + ) + + self.cmd_opts.add_option( + "--global", + dest="global_file", + action="store_true", + default=False, + help="Use the system-wide configuration file only", + ) + + self.cmd_opts.add_option( + "--user", + dest="user_file", + action="store_true", + default=False, + help="Use the user configuration file only", + ) + + self.cmd_opts.add_option( + "--site", + dest="site_file", + action="store_true", + default=False, + help="Use the current environment configuration file only", + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name, + "debug": self.list_config_values, + } + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]: + file_options = [ + key + for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) + if value + ] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." 
+ ) + + def list_values(self, options: Values, args: List[str]) -> None: + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + write_output("%s=%r", key, value) + + def get_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options: Values, args: List[str]) -> None: + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options: Values, args: List[str]) -> None: + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def list_config_values(self, options: Values, args: List[str]) -> None: + """List config key-value pairs across different config files""" + self._get_n_args(args, "debug", n=0) + + self.print_env_var_values() + # Iterate over config files and print if they exist, and the + # key-value pairs present in them if they do + for variant, files in sorted(self.configuration.iter_config_files()): + write_output("%s:", variant) + for fname in files: + with indent_log(): + file_exists = os.path.exists(fname) + write_output("%s, exists: %r", fname, file_exists) + if file_exists: + self.print_config_file_values(variant) + + def print_config_file_values(self, variant: Kind) -> None: + """Get key-value pairs from the file of a variant""" + for name, value in self.configuration.get_values_in_config(variant).items(): + with indent_log(): + write_output("%s: %s", name, value) + + def print_env_var_values(self) -> None: + """Get key-values pairs present as environment variables""" + write_output("%s:", "env_var") + with indent_log(): + for key, value in sorted(self.configuration.get_environ_vars()): + env_var = f"PIP_{key.upper()}" + write_output("%s=%r", env_var, value) + + def open_in_editor(self, options: Values, args: List[str]) -> None: + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + + try: + subprocess.check_call([editor, fname]) + except subprocess.CalledProcessError as e: + raise PipError( + "Editor Subprocess exited with exit code {}".format(e.returncode) + ) + + def _get_n_args(self, args: List[str], example: str, n: int) -> Any: + """Helper to make sure the command got the right number of arguments""" + if len(args) != n: + msg = ( + "Got unexpected number of arguments, expected {}. " + '(example: "{} config {}")' + ).format(n, get_prog(), example) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self) -> None: + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.exception( + "Unable to save configuration. Please report this as a bug." 
+ ) + raise PipError("Internal Error.") + + def _determine_editor(self, options: Values) -> str: + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/debug.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 000000000..d3f1f28de --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,202 @@ +import locale +import logging +import os +import sys +from optparse import Values +from types import ModuleType +from typing import Any, Dict, List, Optional + +import pip._vendor +from pip._vendor.certifi import where +from pip._vendor.packaging.version import parse as parse_version + +from pip import __file__ as pip_location +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.configuration import Configuration +from pip._internal.metadata import get_environment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version + +logger = logging.getLogger(__name__) + + +def show_value(name: str, value: Any) -> None: + logger.info("%s: %s", name, value) + + +def show_sys_implementation() -> None: + logger.info("sys.implementation:") + implementation_name = sys.implementation.name + with indent_log(): + show_value("name", implementation_name) + + +def create_vendor_txt_map() -> Dict[str, str]: + vendor_txt_path = os.path.join( + os.path.dirname(pip_location), "_vendor", "vendor.txt" + ) + + with open(vendor_txt_path) as f: + # Purge non version specifying lines. + # Also, remove any space prefix or suffixes (including comments). + lines = [ + line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line + ] + + # Transform into "module" -> version dict. + return dict(line.split("==", 1) for line in lines) # type: ignore + + +def get_module_from_module_name(module_name: str) -> ModuleType: + # Module name can be uppercase in vendor.txt for some reason... + module_name = module_name.lower() + # PATCH: setuptools is actually only pkg_resources. + if module_name == "setuptools": + module_name = "pkg_resources" + + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + + +def get_vendor_version_from_module(module_name: str) -> Optional[str]: + module = get_module_from_module_name(module_name) + version = getattr(module, "__version__", None) + + if not version: + # Try to find version in debundled module info. + env = get_environment([os.path.dirname(module.__file__)]) + dist = env.get_distribution(module_name) + if dist: + version = str(dist.version) + + return version + + +def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None: + """Log the actual version and print extra info if there is + a conflict or if the actual version could not be imported. 
+ """ + for module_name, expected_version in vendor_txt_versions.items(): + extra_message = "" + actual_version = get_vendor_version_from_module(module_name) + if not actual_version: + extra_message = ( + " (Unable to locate actual module version, using" + " vendor.txt specified version)" + ) + actual_version = expected_version + elif parse_version(actual_version) != parse_version(expected_version): + extra_message = ( + " (CONFLICT: vendor.txt suggests version should" + " be {})".format(expected_version) + ) + logger.info("%s==%s%s", module_name, actual_version, extra_message) + + +def show_vendor_versions() -> None: + logger.info("vendored library versions:") + + vendor_txt_versions = create_vendor_txt_map() + with indent_log(): + show_actual_vendor_versions(vendor_txt_versions) + + +def show_tags(options: Values) -> None: + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = "" + if formatted_target: + suffix = f" (target: {formatted_target})" + + msg = "Compatible tags: {}{}".format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]" + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config: Configuration) -> str: + levels = set() + for key, _ in config.items(): + levels.add(key.split(".")[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ["install", "wheel", "download"] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return "global" + + if "global" in levels: + levels.remove("global") + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog """ + ignore_require_venv = True + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + self.parser.insert_option_group(0, self.cmd_opts) + self.parser.config.load() + + def run(self, options: Values, args: List[str]) -> int: + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value("pip version", get_pip_version()) + show_value("sys.version", sys.version) + show_value("sys.executable", sys.executable) + show_value("sys.getdefaultencoding", sys.getdefaultencoding()) + show_value("sys.getfilesystemencoding", sys.getfilesystemencoding()) + show_value( + "locale.getpreferredencoding", + locale.getpreferredencoding(), + ) + show_value("sys.platform", sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE")) + show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE")) + show_value("pip._vendor.certifi.where()", where()) + show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED) + + show_vendor_versions() + + show_tags(options) + + return SUCCESS diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/download.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/download.py new file mode 100644 index 000000000..233b7e983 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,140 @@ +import logging +import os +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand, with_cleanup +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] ... + %prog [options] ... 
+ %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.global_options()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + + self.cmd_opts.add_option( + "-d", + "--dest", + "--destination-dir", + "--destination-directory", + dest="download_dir", + metavar="dir", + default=os.curdir, + help="Download packages into .", + ) + + cmdoptions.add_target_python_options(self.cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + + options.download_dir = normalize_path(options.download_dir) + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="download", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + verbosity=self.verbosity, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + downloaded: List[str] = [] + for req in requirement_set.requirements.values(): + if req.satisfied_by is None: + assert req.name is not None + preparer.save_linked_requirement(req) + downloaded.append(req.name) + if downloaded: + write_output("Successfully downloaded %s", " ".join(downloaded)) + + return SUCCESS diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/freeze.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 000000000..6e9cc7668 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,97 @@ +import sys +from optparse import Values +from typing import List + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command 
 import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+DEV_PKGS = {"pip", "setuptools", "distribute", "wheel", "pkg-resources"}
+
+
+class FreezeCommand(Command):
+    """
+    Output installed packages in requirements format.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    usage = """
+      %prog [options]"""
+    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(DEV_PKGS))
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable packages from output.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        skip = set(stdlib_pkgs)
+        if not options.freeze_all:
+            skip.update(DEV_PKGS)
+
+        if options.excludes:
+            skip.update(options.excludes)
+
+        cmdoptions.check_list_path_option(options)
+
+        for line in freeze(
+            requirement=options.requirements,
+            local_only=options.local,
+            user_only=options.user,
+            paths=options.path,
+            isolated=options.isolated_mode,
+            skip=skip,
+            exclude_editable=options.exclude_editable,
+        ):
+            sys.stdout.write(line + "\n")
+        return SUCCESS
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/hash.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 000000000..042dac813
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+    """
+    Compute a hash of a local package archive.
+
+    These can be used with --hash in a requirements file to do repeatable
+    installs.
+    """
+
+    usage = "%prog [options] <file> ..."
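+    # Editor's note (illustrative example, not upstream pip commentary):
+    # given the write_output format in run() below, and assuming the default
+    # FAVORITE_HASH algorithm is sha256, an invocation looks roughly like
+    #
+    #   $ pip hash ./example-1.0-py3-none-any.whl
+    #   ./example-1.0-py3-none-any.whl:
+    #   --hash=sha256:<hex digest>
+    #
+    # The emitted --hash line can be placed next to a requirement in a
+    # requirements file to get repeatable, hash-checked installs.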
+ ignore_require_venv = True + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-a", + "--algorithm", + dest="algorithm", + choices=STRONG_HASHES, + action="store", + default=FAVORITE_HASH, + help="The hash algorithm to use: one of {}".format( + ", ".join(STRONG_HASHES) + ), + ) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output( + "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm) + ) + return SUCCESS + + +def _hash_of_file(path: str, algorithm: str) -> str: + """Return the hash digest of a file.""" + with open(path, "rb") as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/help.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/help.py new file mode 100644 index 000000000..62066318b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,41 @@ +from optparse import Values +from typing import List + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog """ + ignore_require_venv = True + + def run(self, options: Values, args: List[str]) -> int: + from pip._internal.commands import ( + commands_dict, + create_command, + get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = [f'unknown command "{cmd_name}"'] + if guess: + msg.append(f'maybe you meant "{guess}"') + + raise CommandError(" - ".join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/index.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/index.py new file mode 100644 index 000000000..9d8aae3b5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/index.py @@ -0,0 +1,139 @@ +import logging +from optparse import Values +from typing import Any, Iterable, List, Optional, Union + +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.commands.search import print_dist_installation_info +from pip._internal.exceptions import CommandError, DistributionNotFound, PipError +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.network.session import PipSession +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class IndexCommand(IndexGroupCommand): + """ + Inspect information available from package indexes. 
+ """ + + usage = """ + %prog versions + """ + + def add_options(self) -> None: + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.pre()) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options: Values, args: List[str]) -> int: + handlers = { + "versions": self.get_available_package_versions, + } + + logger.warning( + "pip index is currently an experimental command. " + "It may be removed/changed in a future release " + "without prior warning." + ) + + # Determine action + if not args or args[0] not in handlers: + logger.error( + "Need an action (%s) to perform.", + ", ".join(sorted(handlers)), + ) + return ERROR + + action = args[0] + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _build_package_finder( + self, + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: + """ + Create a package finder appropriate to the index command. + """ + link_collector = LinkCollector.create(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=options.pre, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled, + ) + + def get_available_package_versions(self, options: Values, args: List[Any]) -> None: + if len(args) != 1: + raise CommandError("You need to specify exactly one argument") + + target_python = cmdoptions.make_target_python(options) + query = args[0] + + with self._build_session(options) as session: + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + + versions: Iterable[Union[LegacyVersion, Version]] = ( + candidate.version for candidate in finder.find_all_candidates(query) + ) + + if not options.pre: + # Remove prereleases + versions = ( + version for version in versions if not version.is_prerelease + ) + versions = set(versions) + + if not versions: + raise DistributionNotFound( + "No matching distribution found for {}".format(query) + ) + + formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] + latest = formatted_versions[0] + + write_output("{} ({})".format(query, latest)) + write_output("Available versions: {}".format(", ".join(formatted_versions))) + print_dist_installation_info(query, latest) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/install.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/install.py new file mode 100644 index 000000000..34e4c2f84 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,771 @@ +import errno +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP, Values 
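+# Editor's note (reviewer's sketch of the flow implemented below, not
+# upstream commentary): InstallCommand.run() resolves the requirement set,
+# builds wheels for whatever needs building, installs the resolved order,
+# and finally reports any dependency conflicts it detected.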
+from typing import Iterable, List, Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import ( + RequirementCommand, + warn_if_run_as_root, + with_cleanup, +) +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import CommandError, InstallationError +from pip._internal.locations import get_scheme +from pip._internal.metadata import get_environment +from pip._internal.models.format_control import FormatControl +from pip._internal.operations.check import ConflictDetails, check_install_conflicts +from pip._internal.req import install_given_reqs +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.distutils_args import parse_distutils_args +from pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ensure_dir, + get_pip_version, + protect_pip_from_modification_on_windows, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) +from pip._internal.wheel_builder import ( + BinaryAllowedPredicate, + build, + should_build_for_install_command, +) + +logger = getLogger(__name__) + + +def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate: + def check_binary_allowed(req: InstallRequirement) -> bool: + canonical_name = canonicalize_name(req.name or "") + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... + %prog [options] ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.pre()) + + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option( + "-t", + "--target", + dest="target_dir", + metavar="dir", + default=None, + help=( + "Install packages into . " + "By default this will not replace existing files/folders in " + ". Use --upgrade to replace existing packages in " + "with new versions." + ), + ) + cmdoptions.add_target_python_options(self.cmd_opts) + + self.cmd_opts.add_option( + "--user", + dest="use_user_site", + action="store_true", + help=( + "Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. 
(See the Python documentation for site.USER_BASE " + "for full details.)" + ), + ) + self.cmd_opts.add_option( + "--no-user", + dest="use_user_site", + action="store_false", + help=SUPPRESS_HELP, + ) + self.cmd_opts.add_option( + "--root", + dest="root_path", + metavar="dir", + default=None, + help="Install everything relative to this alternate root directory.", + ) + self.cmd_opts.add_option( + "--prefix", + dest="prefix_path", + metavar="dir", + default=None, + help=( + "Installation prefix where lib, bin and other top-level " + "folders are placed" + ), + ) + + self.cmd_opts.add_option(cmdoptions.src()) + + self.cmd_opts.add_option( + "-U", + "--upgrade", + dest="upgrade", + action="store_true", + help=( + "Upgrade all specified packages to the newest available " + "version. The handling of dependencies depends on the " + "upgrade-strategy used." + ), + ) + + self.cmd_opts.add_option( + "--upgrade-strategy", + dest="upgrade_strategy", + default="only-if-needed", + choices=["only-if-needed", "eager"], + help=( + "Determines how dependency upgrading should be handled " + "[default: %default]. " + '"eager" - dependencies are upgraded regardless of ' + "whether the currently installed version satisfies the " + "requirements of the upgraded package(s). " + '"only-if-needed" - are upgraded only when they do not ' + "satisfy the requirements of the upgraded package(s)." + ), + ) + + self.cmd_opts.add_option( + "--force-reinstall", + dest="force_reinstall", + action="store_true", + help="Reinstall all packages even if they are already up-to-date.", + ) + + self.cmd_opts.add_option( + "-I", + "--ignore-installed", + dest="ignore_installed", + action="store_true", + help=( + "Ignore the installed packages, overwriting them. " + "This can break your system if the existing package " + "is of a different version or was installed " + "with a different package manager!" 
+ ), + ) + + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + + self.cmd_opts.add_option(cmdoptions.install_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + self.cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + self.cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.require_hashes()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + if options.use_user_site and options.target_dir is not None: + raise CommandError("Can not combine '--user' and '--target'") + + cmdoptions.check_install_build_global(options) + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_dist_restriction(options, check_target=True) + + install_options = options.install_options or [] + + logger.verbose("Using %s", get_pip_version()) + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir: Optional[TempDirectory] = None + target_temp_dir_path: Optional[str] = None + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if ( + # fmt: off + os.path.exists(options.target_dir) and + not os.path.isdir(options.target_dir) + # fmt: on + ): + raise CommandError( + "Target path exists but is not a directory, will not continue." 
+ ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + self.enter_context(target_temp_dir) + + global_options = options.global_options or [] + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="install", + globally_managed=True, + ) + + try: + reqs = self.get_requirements(args, options, finder, session) + + # Only when installing is it permitted to use PEP 660. + # In other circumstances (pip wheel, pip download) we generate + # regular (i.e. non editable) metadata and wheels. + for req in reqs: + req.permit_editable_wheels = True + + reject_location_related_install_options(reqs, options.install_options) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + verbosity=self.verbosity, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve( + reqs, check_supported_wheels=not options.target_dir + ) + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = False + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) + + check_binary_allowed = get_check_binary_allowed(finder.format_control) + + reqs_to_build = [ + r + for r in requirement_set.requirements.values() + if should_build_for_install_command(r, check_binary_allowed) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=True, + build_options=[], + global_options=[], + ) + + # If we're using PEP 517, we cannot do a legacy setup.py install + # so we fail here. + pep517_build_failure_names: List[str] = [ + r.name for r in build_failures if r.use_pep517 # type: ignore + ] + if pep517_build_failure_names: + raise InstallationError( + "Could not build wheels for {}, which is required to " + "install pyproject.toml-based projects".format( + ", ".join(pep517_build_failure_names) + ) + ) + + # For now, we just warn about failures building legacy + # requirements, as we'll fall through to a setup.py install for + # those. + for r in build_failures: + if not r.use_pep517: + r.legacy_install_reason = 8368 + + to_install = resolver.get_installation_order(requirement_set) + + # Check for conflicts in the package set we're installing. 
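+            # Editor's note (inferred from _warn_about_conflicts below, not
+            # upstream commentary): the value computed here unpacks as
+            #   package_set, (missing, conflicting) = conflict_details
+            # where `missing` maps a project name to its absent dependencies
+            # and `conflicting` maps it to (dep_name, dep_version, req) triples.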
+ conflicts: Optional[ConflictDetails] = None + should_warn_about_conflicts = ( + not options.ignore_dependencies and options.warn_about_conflicts + ) + if should_warn_about_conflicts: + conflicts = self._determine_conflicts(to_install) + + # Don't warn about script install locations if + # --target or --prefix has been specified + warn_script_location = options.warn_script_location + if options.target_dir or options.prefix_path: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + pycompile=options.compile, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + env = get_environment(lib_locations) + + installed.sort(key=operator.attrgetter("name")) + items = [] + for result in installed: + item = result.name + try: + installed_dist = env.get_distribution(item) + if installed_dist is not None: + item = f"{item}-{installed_dist.version}" + except Exception: + pass + items.append(item) + + if conflicts is not None: + self._warn_about_conflicts( + conflicts, + resolver_variant=self.determine_resolver_variant(options), + ) + + installed_desc = " ".join(items) + if installed_desc: + write_output( + "Successfully installed %s", + installed_desc, + ) + except OSError as error: + show_traceback = self.verbosity >= 1 + + message = create_os_error_message( + error, + show_traceback, + options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) # noqa + + return ERROR + + if options.target_dir: + assert target_temp_dir + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + + warn_if_run_as_root() + return SUCCESS + + def _handle_target_dir( + self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool + ) -> None: + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = get_scheme("", home=target_temp_dir.path) + purelib_dir = scheme.purelib + platlib_dir = scheme.platlib + data_dir = scheme.data + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + "Target directory %s already exists. Specify " + "--upgrade to force replacement.", + target_item_dir, + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + "Target directory %s already exists and is " + "a link. 
pip will not automatically replace " + "links, please remove if replacement is " + "desired.", + target_item_dir, + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move(os.path.join(lib_dir, item), target_item_dir) + + def _determine_conflicts( + self, to_install: List[InstallRequirement] + ) -> Optional[ConflictDetails]: + try: + return check_install_conflicts(to_install) + except Exception: + logger.exception( + "Error while checking for conflicts. Please file an issue on " + "pip's issue tracker: https://github.com/pypa/pip/issues/new" + ) + return None + + def _warn_about_conflicts( + self, conflict_details: ConflictDetails, resolver_variant: str + ) -> None: + package_set, (missing, conflicting) = conflict_details + if not missing and not conflicting: + return + + parts: List[str] = [] + if resolver_variant == "legacy": + parts.append( + "pip's legacy dependency resolver does not consider dependency " + "conflicts when selecting packages. This behaviour is the " + "source of the following dependency conflicts." + ) + else: + assert resolver_variant == "2020-resolver" + parts.append( + "pip's dependency resolver does not currently take into account " + "all the packages that are installed. This behaviour is the " + "source of the following dependency conflicts." + ) + + # NOTE: There is some duplication here, with commands/check.py + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + message = ( + "{name} {version} requires {requirement}, " + "which is not installed." + ).format( + name=project_name, + version=version, + requirement=dependency[1], + ) + parts.append(message) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + message = ( + "{name} {version} requires {requirement}, but {you} have " + "{dep_name} {dep_version} which is incompatible." + ).format( + name=project_name, + version=version, + requirement=req, + dep_name=dep_name, + dep_version=dep_version, + you=("you" if resolver_variant == "2020-resolver" else "you'll"), + ) + parts.append(message) + + logger.critical("\n".join(parts)) + + +def get_lib_location_guesses( + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> List[str]: + scheme = get_scheme( + "", + user=user, + home=home, + root=root, + isolated=isolated, + prefix=prefix, + ) + return [scheme.purelib, scheme.platlib] + + +def site_packages_writable(root: Optional[str], isolated: bool) -> bool: + return all( + test_writable_dir(d) + for d in set(get_lib_location_guesses(root=root, isolated=isolated)) + ) + + +def decide_user_install( + use_user_site: Optional[bool], + prefix_path: Optional[str] = None, + target_dir: Optional[str] = None, + root_path: Optional[str] = None, + isolated_mode: bool = False, +) -> bool: + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. 
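+    # Editor's note (illustrative example, not upstream commentary): with
+    # use_user_site == 0 from such a config, `use_user_site is False` is
+    # False, but `not use_user_site` is True, so the truthiness test below
+    # still treats it as an explicit non-user install.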
+ if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. + if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info( + "Defaulting to user installation because normal site-packages " + "is not writeable" + ) + return True + + +def reject_location_related_install_options( + requirements: List[InstallRequirement], options: Optional[List[str]] +) -> None: + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + + def format_options(option_names: Iterable[str]) -> List[str]: + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + offenders = [] + + for requirement in requirements: + install_options = requirement.install_options + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format(format_options(location_options.keys())) + ) + + if not offenders: + return + + raise CommandError( + "Location-changing options found in --install-option: {}." + " This is unsupported, use pip-level options like --user," + " --prefix, --root, and --target instead.".format("; ".join(offenders)) + ) + + +def create_os_error_message( + error: OSError, show_traceback: bool, using_user_site: bool +) -> str: + """Format an error message for an OSError + + It may occur anytime during the execution of the install command. 
+    """
+    parts = []
+
+    # Mention the error if we are not going to show a traceback
+    parts.append("Could not install packages due to an OSError")
+    if not show_traceback:
+        parts.append(": ")
+        parts.append(str(error))
+    else:
+        parts.append(".")
+
+    # Split the error indication from a helper message (if any)
+    parts[-1] += "\n"
+
+    # Suggest useful actions to the user:
+    # (1) using user site-packages or (2) verifying the permissions
+    if error.errno == errno.EACCES:
+        user_option_part = "Consider using the `--user` option"
+        permissions_part = "Check the permissions"
+
+        if not running_under_virtualenv() and not using_user_site:
+            parts.extend(
+                [
+                    user_option_part,
+                    " or ",
+                    permissions_part.lower(),
+                ]
+            )
+        else:
+            parts.append(permissions_part)
+        parts.append(".\n")
+
+    # Suggest that the user enable Long Paths if the path length is
+    # more than 260 characters
+    if (
+        WINDOWS
+        and error.errno == errno.ENOENT
+        and error.filename
+        and len(error.filename) > 260
+    ):
+        parts.append(
+            "HINT: This error might have occurred since "
+            "this system does not have Windows Long Path "
+            "support enabled. You can find information on "
+            "how to enable this at "
+            "https://pip.pypa.io/warnings/enable-long-paths\n"
+        )
+
+    return "".join(parts).strip() + "\n"
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/list.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 000000000..3a545e90d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,363 @@
+import json
+import logging
+from optparse import Values
+from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.session import PipSession
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.misc import tabulate, write_output
+
+if TYPE_CHECKING:
+    from pip._internal.metadata.base import DistributionVersion
+
+    class _DistWithLatestInfo(BaseDistribution):
+        """Give the distribution object a couple of extra fields.
+
+        These will be populated during ``get_outdated()``. This is dirty but
+        makes the rest of the code much cleaner.
+        """
+
+        latest_version: DistributionVersion
+        latest_filetype: str
+
+    _ProcessedDists = Sequence[_DistWithLatestInfo]
+
+
+from pip._vendor.packaging.version import parse
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(IndexGroupCommand):
+    """
+    List installed packages, including editables.
+
+    Packages are listed in a case-insensitive sorted order.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-o",
+            "--outdated",
+            action="store_true",
+            default=False,
+            help="List outdated packages",
+        )
+        self.cmd_opts.add_option(
+            "-u",
+            "--uptodate",
+            action="store_true",
+            default=False,
+            help="List uptodate packages",
+        )
+        self.cmd_opts.add_option(
+            "-e",
+            "--editable",
+            action="store_true",
+            default=False,
+            help="List editable projects.",
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not list "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--pre",
+            action="store_true",
+            default=False,
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="columns",
+            choices=("columns", "freeze", "json"),
+            help="Select the output format among: columns (default), freeze, or json",
+        )
+
+        self.cmd_opts.add_option(
+            "--not-required",
+            action="store_true",
+            dest="not_required",
+            help="List packages that are not dependencies of installed packages.",
+        )
+
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            action="store_false",
+            dest="include_editable",
+            help="Exclude editable packages from output.",
+        )
+        self.cmd_opts.add_option(
+            "--include-editable",
+            action="store_true",
+            dest="include_editable",
+            help="Include editable packages in output.",
+            default=True,
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def _build_package_finder(
+        self, options: Values, session: PipSession
+    ) -> PackageFinder:
+        """
+        Create a package finder appropriate to this list command.
+        """
+        link_collector = LinkCollector.create(session, options=options)
+
+        # Pass allow_yanked=False to ignore yanked versions.
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
+        )
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if options.outdated and options.uptodate:
+            raise CommandError("Options --outdated and --uptodate cannot be combined.")
+
+        cmdoptions.check_list_path_option(options)
+
+        skip = set(stdlib_pkgs)
+        if options.excludes:
+            skip.update(canonicalize_name(n) for n in options.excludes)
+
+        packages: "_ProcessedDists" = [
+            cast("_DistWithLatestInfo", d)
+            for d in get_environment(options.path).iter_installed_distributions(
+                local_only=options.local,
+                user_only=options.user,
+                editables_only=options.editable,
+                include_editables=options.include_editable,
+                skip=skip,
+            )
+        ]
+
+        # get_not_required must be called first in order to find and
+        # filter out all dependencies correctly. Otherwise a package
+        # can't be identified as a requirement because some parent packages
+        # could be filtered out before.
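+        # Editor's note (illustrative example, not upstream commentary): if A
+        # depends on B and the --outdated filter ran first and dropped A, B
+        # would no longer be seen as a dependency and would wrongly survive
+        # a --not-required listing.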
+ if options.not_required: + packages = self.get_not_required(packages, options) + + if options.outdated: + packages = self.get_outdated(packages, options) + elif options.uptodate: + packages = self.get_uptodate(packages, options) + + self.output_package_listing(packages, options) + return SUCCESS + + def get_outdated( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if parse(str(dist.latest_version)) > parse(str(dist.version)) + ] + + def get_uptodate( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + return [ + dist + for dist in self.iter_packages_latest_infos(packages, options) + if parse(str(dist.latest_version)) == parse(str(dist.version)) + ] + + def get_not_required( + self, packages: "_ProcessedDists", options: Values + ) -> "_ProcessedDists": + dep_keys = { + canonicalize_name(dep.name) + for dist in packages + for dep in (dist.iter_dependencies() or ()) + } + + # Create a set to remove duplicate packages, and cast it to a list + # to keep the return type consistent with get_outdated and + # get_uptodate + return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) + + def iter_packages_latest_infos( + self, packages: "_ProcessedDists", options: Values + ) -> Iterator["_DistWithLatestInfo"]: + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + + def latest_info( + dist: "_DistWithLatestInfo", + ) -> Optional["_DistWithLatestInfo"]: + all_candidates = finder.find_all_candidates(dist.canonical_name) + if not options.pre: + # Remove prereleases + all_candidates = [ + candidate + for candidate in all_candidates + if not candidate.version.is_prerelease + ] + + evaluator = finder.make_candidate_evaluator( + project_name=dist.canonical_name, + ) + best_candidate = evaluator.sort_best_candidate(all_candidates) + if best_candidate is None: + return None + + remote_version = best_candidate.version + if best_candidate.link.is_wheel: + typ = "wheel" + else: + typ = "sdist" + dist.latest_version = remote_version + dist.latest_filetype = typ + return dist + + for dist in map(latest_info, packages): + if dist is not None: + yield dist + + def output_package_listing( + self, packages: "_ProcessedDists", options: Values + ) -> None: + packages = sorted( + packages, + key=lambda dist: dist.canonical_name, + ) + if options.list_format == "columns" and packages: + data, header = format_for_columns(packages, options) + self.output_package_listing_columns(data, header) + elif options.list_format == "freeze": + for dist in packages: + if options.verbose >= 1: + write_output( + "%s==%s (%s)", dist.raw_name, dist.version, dist.location + ) + else: + write_output("%s==%s", dist.raw_name, dist.version) + elif options.list_format == "json": + write_output(format_for_json(packages, options)) + + def output_package_listing_columns( + self, data: List[List[str]], header: List[str] + ) -> None: + # insert the header first: we need to know the size of column names + if len(data) > 0: + data.insert(0, header) + + pkg_strings, sizes = tabulate(data) + + # Create and add a separator. 
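+        # Editor's note (illustrative example, not upstream commentary): for
+        # the header ["Package", "Version"] and column sizes [7, 7], the row
+        # inserted below renders as "------- -------".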
+        if len(data) > 0:
+            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
+
+        for val in pkg_strings:
+            write_output(val)
+
+
+def format_for_columns(
+    pkgs: "_ProcessedDists", options: Values
+) -> Tuple[List[List[str]], List[str]]:
+    """
+    Convert the package data into something usable
+    by output_package_listing_columns.
+    """
+    header = ["Package", "Version"]
+
+    running_outdated = options.outdated
+    if running_outdated:
+        header.extend(["Latest", "Type"])
+
+    has_editables = any(x.editable for x in pkgs)
+    if has_editables:
+        header.append("Editable project location")
+
+    if options.verbose >= 1:
+        header.append("Location")
+        header.append("Installer")
+
+    data = []
+    for proj in pkgs:
+        # if we're working on the 'outdated' list, separate out the
+        # latest_version and type
+        row = [proj.raw_name, str(proj.version)]
+
+        if running_outdated:
+            row.append(str(proj.latest_version))
+            row.append(proj.latest_filetype)
+
+        if has_editables:
+            row.append(proj.editable_project_location or "")
+
+        if options.verbose >= 1:
+            row.append(proj.location or "")
+            row.append(proj.installer)
+
+        data.append(row)
+
+    return data, header
+
+
+def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
+    data = []
+    for dist in packages:
+        info = {
+            "name": dist.raw_name,
+            "version": str(dist.version),
+        }
+        if options.verbose >= 1:
+            info["location"] = dist.location or ""
+            info["installer"] = dist.installer
+        if options.outdated:
+            info["latest_version"] = str(dist.latest_version)
+            info["latest_filetype"] = dist.latest_filetype
+        editable_project_location = dist.editable_project_location
+        if editable_project_location:
+            info["editable_project_location"] = editable_project_location
+        data.append(info)
+    return json.dumps(data)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/search.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 000000000..03ed925b2
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,174 @@
+import logging
+import shutil
+import sys
+import textwrap
+import xmlrpc.client
+from collections import OrderedDict
+from optparse import Values
+from typing import TYPE_CHECKING, Dict, List, Optional
+
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.metadata import get_default_environment
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    class TransformedHit(TypedDict):
+        name: str
+        summary: str
+        versions: List[str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+    """Search for PyPI packages whose name or summary contains <query>."""
+
+    usage = """
+      %prog [options] <query>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-i",
+            "--index",
+            dest="index",
+            metavar="URL",
+            default=PyPI.pypi_url,
+            help="Base URL of Python Package Index (default %default)",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def
run(self, options: Values, args: List[str]) -> int: + if not args: + raise CommandError("Missing required argument (search query).") + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = shutil.get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query: List[str], options: Values) -> List[Dict[str, str]]: + index_url = options.index + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc.client.ServerProxy(index_url, transport) + try: + hits = pypi.search({"name": query, "summary": query}, "or") + except xmlrpc.client.Fault as fault: + message = "XMLRPC request failed [code: {code}]\n{string}".format( + code=fault.faultCode, + string=fault.faultString, + ) + raise CommandError(message) + assert isinstance(hits, list) + return hits + + +def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]: + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. + """ + packages: Dict[str, "TransformedHit"] = OrderedDict() + for hit in hits: + name = hit["name"] + summary = hit["summary"] + version = hit["version"] + + if name not in packages.keys(): + packages[name] = { + "name": name, + "summary": summary, + "versions": [version], + } + else: + packages[name]["versions"].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]["versions"]): + packages[name]["summary"] = summary + + return list(packages.values()) + + +def print_dist_installation_info(name: str, latest: str) -> None: + env = get_default_environment() + dist = env.get_distribution(name) + if dist is not None: + with indent_log(): + if dist.version == latest: + write_output("INSTALLED: %s (latest)", dist.version) + else: + write_output("INSTALLED: %s", dist.version) + if parse_version(latest).pre: + write_output( + "LATEST: %s (pre-release; install" + " with `pip install --pre`)", + latest, + ) + else: + write_output("LATEST: %s", latest) + + +def print_results( + hits: List["TransformedHit"], + name_column_width: Optional[int] = None, + terminal_width: Optional[int] = None, +) -> None: + if not hits: + return + if name_column_width is None: + name_column_width = ( + max( + [ + len(hit["name"]) + len(highest_version(hit.get("versions", ["-"]))) + for hit in hits + ] + ) + + 4 + ) + + for hit in hits: + name = hit["name"] + summary = hit["summary"] or "" + latest = highest_version(hit.get("versions", ["-"])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary_lines = textwrap.wrap(summary, target_width) + summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines) + + name_latest = f"{name} ({latest})" + line = f"{name_latest:{name_column_width}} - {summary}" + try: + write_output(line) + print_dist_installation_info(name, latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions: List[str]) -> str: + return max(versions, key=parse_version) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/show.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/show.py new file mode 100644 index 
000000000..d5540d68b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,178 @@
+import logging
+from optparse import Values
+from typing import Iterator, List, NamedTuple, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+    """
+    Show information about one or more installed packages.
+
+    The output is in RFC-compliant mail header format.
+    """
+
+    usage = """
+      %prog [options] <package> ..."""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
+            default=False,
+            help="Show the full list of installed files for each package.",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            logger.warning("ERROR: Please provide a package name or names.")
+            return ERROR
+        query = args
+
+        results = search_packages_info(query)
+        if not print_results(
+            results, list_files=options.files, verbose=options.verbose
+        ):
+            return ERROR
+        return SUCCESS
+
+
+class _PackageInfo(NamedTuple):
+    name: str
+    version: str
+    location: str
+    requires: List[str]
+    required_by: List[str]
+    installer: str
+    metadata_version: str
+    classifiers: List[str]
+    summary: str
+    homepage: str
+    author: str
+    author_email: str
+    license: str
+    entry_points: List[str]
+    files: Optional[List[str]]
+
+
+def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
+    """
+    Gather details from installed distributions. Print distribution name,
+    version, location, and installed files. Listing installed files requires
+    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
+    directory.
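+
+    A minimal usage sketch (illustrative)::
+
+        for info in search_packages_info(["pip"]):
+            print(info.name, info.version)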
+ """ + env = get_default_environment() + + installed = {dist.canonical_name: dist for dist in env.iter_distributions()} + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning("Package(s) not found: %s", ", ".join(missing)) + + def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: + return ( + dist.metadata["Name"] or "UNKNOWN" + for dist in installed.values() + if current_dist.canonical_name + in {canonicalize_name(d.name) for d in dist.iter_dependencies()} + ) + + for query_name in query_names: + try: + dist = installed[query_name] + except KeyError: + continue + + requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower) + required_by = sorted(_get_requiring_packages(dist), key=str.lower) + + try: + entry_points_text = dist.read_text("entry_points.txt") + entry_points = entry_points_text.splitlines(keepends=False) + except FileNotFoundError: + entry_points = [] + + files_iter = dist.iter_declared_entries() + if files_iter is None: + files: Optional[List[str]] = None + else: + files = sorted(files_iter) + + metadata = dist.metadata + + yield _PackageInfo( + name=dist.raw_name, + version=str(dist.version), + location=dist.location or "", + requires=requires, + required_by=required_by, + installer=dist.installer, + metadata_version=dist.metadata_version or "", + classifiers=metadata.get_all("Classifier", []), + summary=metadata.get("Summary", ""), + homepage=metadata.get("Home-page", ""), + author=metadata.get("Author", ""), + author_email=metadata.get("Author-email", ""), + license=metadata.get("License", ""), + entry_points=entry_points, + files=files, + ) + + +def print_results( + distributions: Iterator[_PackageInfo], + list_files: bool, + verbose: bool, +) -> bool: + """ + Print the information from installed distributions found. 
+ """ + results_printed = False + for i, dist in enumerate(distributions): + results_printed = True + if i > 0: + write_output("---") + + write_output("Name: %s", dist.name) + write_output("Version: %s", dist.version) + write_output("Summary: %s", dist.summary) + write_output("Home-page: %s", dist.homepage) + write_output("Author: %s", dist.author) + write_output("Author-email: %s", dist.author_email) + write_output("License: %s", dist.license) + write_output("Location: %s", dist.location) + write_output("Requires: %s", ", ".join(dist.requires)) + write_output("Required-by: %s", ", ".join(dist.required_by)) + + if verbose: + write_output("Metadata-Version: %s", dist.metadata_version) + write_output("Installer: %s", dist.installer) + write_output("Classifiers:") + for classifier in dist.classifiers: + write_output(" %s", classifier) + write_output("Entry-points:") + for entry in dist.entry_points: + write_output(" %s", entry.strip()) + if list_files: + write_output("Files:") + if dist.files is None: + write_output("Cannot locate RECORD or installed-files.txt") + else: + for line in dist.files: + write_output(" %s", line.strip()) + return results_printed diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py new file mode 100644 index 000000000..bb9e8e6a3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py @@ -0,0 +1,105 @@ +import logging +from optparse import Values +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import InstallationError +from pip._internal.req import parse_requirements +from pip._internal.req.constructors import ( + install_req_from_line, + install_req_from_parsed_requirement, +) +from pip._internal.utils.misc import protect_pip_from_modification_on_windows + +logger = logging.getLogger(__name__) + + +class UninstallCommand(Command, SessionCommandMixin): + """ + Uninstall packages. + + pip is able to uninstall most installed packages. Known exceptions are: + + - Pure distutils packages installed with ``python setup.py install``, which + leave behind no metadata to determine what files were installed. + - Script wrappers installed by ``python setup.py develop``. + """ + + usage = """ + %prog [options] ... + %prog [options] -r ...""" + + def add_options(self) -> None: + self.cmd_opts.add_option( + "-r", + "--requirement", + dest="requirements", + action="append", + default=[], + metavar="file", + help=( + "Uninstall all the packages listed in the given requirements " + "file. This option can be used multiple times." 
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        session = self.get_default_session(options)
+
+        reqs_to_uninstall = {}
+        for name in args:
+            req = install_req_from_line(
+                name,
+                isolated=options.isolated_mode,
+            )
+            if req.name:
+                reqs_to_uninstall[canonicalize_name(req.name)] = req
+            else:
+                logger.warning(
+                    "Invalid requirement: %r ignored -"
+                    " the uninstall command expects named"
+                    " requirements.",
+                    name,
+                )
+        for filename in options.requirements:
+            for parsed_req in parse_requirements(
+                filename, options=options, session=session
+            ):
+                req = install_req_from_parsed_requirement(
+                    parsed_req, isolated=options.isolated_mode
+                )
+                if req.name:
+                    reqs_to_uninstall[canonicalize_name(req.name)] = req
+        if not reqs_to_uninstall:
+            raise InstallationError(
+                f"You must give at least one requirement to {self.name} (see "
+                f'"pip help {self.name}")'
+            )
+
+        protect_pip_from_modification_on_windows(
+            modifying_pip="pip" in reqs_to_uninstall
+        )
+
+        for req in reqs_to_uninstall.values():
+            uninstall_pathset = req.uninstall(
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
+            )
+            if uninstall_pathset:
+                uninstall_pathset.commit()
+
+        warn_if_run_as_root()
+        return SUCCESS
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/commands/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 000000000..d5b20dc9f
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,178 @@
+import logging
+import os
+import shutil
+from optparse import Values
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: https://wheel.readthedocs.io/en/latest/
+
+    Requirements: setuptools>=0.8, and wheel.
+
+    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+    package to build individual wheels.
+
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+
+        self.cmd_opts.add_option(
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
+            default=os.curdir,
+            help=(
+                "Build wheels into <dir>, where the default is the "
+                "current working directory."
+ ), + ) + self.cmd_opts.add_option(cmdoptions.no_binary()) + self.cmd_opts.add_option(cmdoptions.only_binary()) + self.cmd_opts.add_option(cmdoptions.prefer_binary()) + self.cmd_opts.add_option(cmdoptions.no_build_isolation()) + self.cmd_opts.add_option(cmdoptions.use_pep517()) + self.cmd_opts.add_option(cmdoptions.no_use_pep517()) + self.cmd_opts.add_option(cmdoptions.constraints()) + self.cmd_opts.add_option(cmdoptions.editable()) + self.cmd_opts.add_option(cmdoptions.requirements()) + self.cmd_opts.add_option(cmdoptions.src()) + self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) + self.cmd_opts.add_option(cmdoptions.no_deps()) + self.cmd_opts.add_option(cmdoptions.progress_bar()) + + self.cmd_opts.add_option( + "--no-verify", + dest="no_verify", + action="store_true", + default=False, + help="Don't verify if built wheel is valid.", + ) + + self.cmd_opts.add_option(cmdoptions.build_options()) + self.cmd_opts.add_option(cmdoptions.global_options()) + + self.cmd_opts.add_option( + "--pre", + action="store_true", + default=False, + help=( + "Include pre-release and development versions. By default, " + "pip only finds stable versions." + ), + ) + + self.cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, self.cmd_opts) + + @with_cleanup + def run(self, options: Values, args: List[str]) -> int: + cmdoptions.check_install_build_global(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + req_tracker = self.enter_context(get_requirement_tracker()) + + directory = TempDirectory( + delete=not options.no_clean, + kind="wheel", + globally_managed=True, + ) + + reqs = self.get_requirements(args, options, finder, session) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.wheel_dir, + use_user_site=False, + verbosity=self.verbosity, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + + reqs_to_build: List[InstallRequirement] = [] + for req in requirement_set.requirements.values(): + if req.is_wheel: + preparer.save_linked_requirement(req) + elif should_build_for_wheel_command(req): + reqs_to_build.append(req) + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + verify=(not options.no_verify), + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + assert req.local_file_path + # copy from cache to target directory + try: + shutil.copy(req.local_file_path, options.wheel_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + build_failures.append(req) + if len(build_failures) != 0: + raise CommandError("Failed to build one or more wheels") + + return SUCCESS diff --git 
a/myenv/lib/python3.10/site-packages/pip/_internal/configuration.py b/myenv/lib/python3.10/site-packages/pip/_internal/configuration.py
new file mode 100644
index 000000000..a8092d1ae
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,366 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+  As written in config files.
+- value
+  Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+  A single word describing where the configuration key-value pair came from
+"""
+
+import configparser
+import locale
+import os
+import sys
+from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
+
+from pip._internal.exceptions import (
+    ConfigurationError,
+    ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import ensure_dir, enum
+
+RawConfigParser = configparser.RawConfigParser  # Shorthand
+Kind = NewType("Kind", str)
+
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
+ENV_NAMES_IGNORED = "version", "help"
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",  # User Specific
+    GLOBAL="global",  # System Wide
+    SITE="site",  # [Virtual] Environment Specific
+    ENV="env",  # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
+
+logger = getLogger(__name__)
+
+
+# NOTE: Maybe use the optionxform attribute to normalize key names.
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    if name.startswith("--"):
+        name = name[2:]  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name: str) -> List[str]:
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            "Perhaps you wanted to use 'global.{}' instead?"
+        ).format(name)
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+def get_configuration_files() -> Dict[Kind, List[str]]:
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration:
+    """Handles management of configuration.
+
+    Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style keys and
+    stores the value associated with it as "key-name" under the section
+    "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy-to-manage form in the configuration
+    files, and the stored data stays clean as well.
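+
+    A minimal usage sketch (illustrative; assumes an `index-url` has been
+    configured)::
+
+        configuration = Configuration(isolated=False)
+        configuration.load()
+        print(configuration.get_value("global.index-url"))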
+ """ + + def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None: + super().__init__() + + if load_only is not None and load_only not in VALID_LOAD_ONLY: + raise ConfigurationError( + "Got invalid value for load_only - should be one of {}".format( + ", ".join(map(repr, VALID_LOAD_ONLY)) + ) + ) + self.isolated = isolated + self.load_only = load_only + + # Because we keep track of where we got the data from + self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = { + variant: [] for variant in OVERRIDE_ORDER + } + self._config: Dict[Kind, Dict[str, Any]] = { + variant: {} for variant in OVERRIDE_ORDER + } + self._modified_parsers: List[Tuple[str, RawConfigParser]] = [] + + def load(self) -> None: + """Loads configuration from configuration files and environment""" + self._load_config_files() + if not self.isolated: + self._load_environment_vars() + + def get_file_to_edit(self) -> Optional[str]: + """Returns the file with highest priority in configuration""" + assert self.load_only is not None, "Need to be specified a file to be editing" + + try: + return self._get_parser_to_modify()[0] + except IndexError: + return None + + def items(self) -> Iterable[Tuple[str, Any]]: + """Returns key-value pairs like dict.items() representing the loaded + configuration + """ + return self._dictionary.items() + + def get_value(self, key: str) -> Any: + """Get a value from the configuration.""" + try: + return self._dictionary[key] + except KeyError: + raise ConfigurationError(f"No such key - {key}") + + def set_value(self, key: str, value: Any) -> None: + """Modify a value in the configuration.""" + self._ensure_have_load_only() + + assert self.load_only + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Modify the parser and the configuration + if not parser.has_section(section): + parser.add_section(section) + parser.set(section, name, value) + + self._config[self.load_only][key] = value + self._mark_as_modified(fname, parser) + + def unset_value(self, key: str) -> None: + """Unset a value in the configuration.""" + self._ensure_have_load_only() + + assert self.load_only + if key not in self._config[self.load_only]: + raise ConfigurationError(f"No such key - {key}") + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + if not ( + parser.has_section(section) and parser.remove_option(section, name) + ): + # The option was not removed. + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + # The section may be empty after the option was removed. + if not parser.items(section): + parser.remove_section(section) + self._mark_as_modified(fname, parser) + + del self._config[self.load_only][key] + + def save(self) -> None: + """Save the current in-memory state.""" + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. + ensure_dir(os.path.dirname(fname)) + + with open(fname, "w") as f: + parser.write(f) + + # + # Private routines + # + + def _ensure_have_load_only(self) -> None: + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self) -> Dict[str, Any]: + """A dictionary representing the loaded configuration.""" + # NOTE: Dictionaries are not populated if not loaded. 
+        # are not needed here.
+        retval = {}
+
+        for variant in OVERRIDE_ORDER:
+            retval.update(self._config[variant])
+
+        return retval
+
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
+        config_files = dict(self.iter_config_files())
+        if config_files[kinds.ENV][0:1] == [os.devnull]:
+            logger.debug(
+                "Skipping loading configuration files due to "
+                "environment's PIP_CONFIG_FILE being os.devnull"
+            )
+            return
+
+        for variant, files in config_files.items():
+            for fname in files:
+                # If there's a specific variant set in `load_only`, load only
+                # that variant, not the others.
+                if self.load_only is not None and variant != self.load_only:
+                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
+                    continue
+
+                parser = self._load_file(variant, fname)
+
+                # Keeping track of the parsers used
+                self._parsers[variant].append((fname, parser))
+
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
+        parser = self._construct_parser(fname)
+
+        for section in parser.sections():
+            items = parser.items(section)
+            self._config[variant].update(self._normalized_keys(section, items))
+
+        return parser
+
+    def _construct_parser(self, fname: str) -> RawConfigParser:
+        parser = configparser.RawConfigParser()
+        # If there is no such file, don't bother reading it but create the
+        # parser anyway, to hold the data.
+        # Doing this is useful when modifying and saving files, where we don't
+        # need to construct a parser.
+        if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
+            try:
+                parser.read(fname, encoding=locale_encoding)
+            except UnicodeDecodeError:
+                # See https://github.com/pypa/pip/issues/4963
+                raise ConfigurationFileCouldNotBeLoaded(
+                    reason=f"contains invalid {locale_encoding} characters",
+                    fname=fname,
+                )
+            except configparser.Error as error:
+                # See https://github.com/pypa/pip/issues/4893
+                raise ConfigurationFileCouldNotBeLoaded(error=error)
+        return parser
+
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
+        self._config[kinds.ENV_VAR].update(
+            self._normalized_keys(":env:", self.get_environ_vars())
+        )
+
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
+        """Normalizes items to construct a dictionary with normalized keys.
+
+        This routine is where the names become keys and are made the same
+        regardless of source - configuration files or environment.
+        """
+        normalized = {}
+        for name, val in items:
+            key = section + "." + _normalize_name(name)
+            normalized[key] = val
+        return normalized
+
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            if key.startswith("PIP_"):
+                name = key[4:].lower()
+                if name not in ENV_NAMES_IGNORED:
+                    yield name, val
+
+    # XXX: This is patched in the tests.
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
+        """Yields variant and configuration files associated with it.
+
+        This should be treated like items of a dictionary.
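+
+        For example, one yielded pair might look like this (illustrative
+        path)::
+
+            (kinds.GLOBAL, ["/etc/xdg/pip/pip.conf"])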
+ """ + # SMELL: Move the conditions out of this function + + # environment variables have the lowest priority + config_file = os.environ.get("PIP_CONFIG_FILE", None) + if config_file is not None: + yield kinds.ENV, [config_file] + else: + yield kinds.ENV, [] + + config_files = get_configuration_files() + + # at the base we have any global configuration + yield kinds.GLOBAL, config_files[kinds.GLOBAL] + + # per-user configuration next + should_load_user_config = not self.isolated and not ( + config_file and os.path.exists(config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, config_files[kinds.USER] + + # finally virtualenv configuration first trumping others + yield kinds.SITE, config_files[kinds.SITE] + + def get_values_in_config(self, variant: Kind) -> Dict[str, Any]: + """Get values present in a config file""" + return self._config[variant] + + def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]: + # Determine which parser to modify + assert self.load_only + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. + def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None: + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._dictionary!r})" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 000000000..9a89a838b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,21 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement( + install_req: InstallRequirement, +) -> AbstractDistribution: + """Returns a Distribution for the given InstallRequirement""" + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. 
+ if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..c42a6347f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc new file mode 100644 index 000000000..8d86cd7d6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc new file mode 100644 index 000000000..ecb33bab6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc new file mode 100644 index 000000000..9de0e381b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc new file mode 100644 index 000000000..8c7ccd133 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/base.py b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 000000000..149fff55d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,36 @@ +import abc + +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata.base import BaseDistribution +from pip._internal.req import InstallRequirement + + +class AbstractDistribution(metaclass=abc.ABCMeta): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. 
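+
+    Concrete subclasses implement ``get_metadata_distribution`` and
+    ``prepare_distribution_metadata`` (declared below).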
+ """ + + def __init__(self, req: InstallRequirement) -> None: + super().__init__() + self.req = req + + @abc.abstractmethod + def get_metadata_distribution(self) -> BaseDistribution: + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + raise NotImplementedError() diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/installed.py b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 000000000..be5962f98 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,20 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + assert self.req.satisfied_by is not None, "not actually installed" + return self.req.satisfied_by + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + pass diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py new file mode 100644 index 000000000..bdaf4033c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py @@ -0,0 +1,127 @@ +import logging +from typing import Iterable, Set, Tuple + +from pip._internal.build_env import BuildEnvironment +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.exceptions import InstallationError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +class SourceDistribution(AbstractDistribution): + """Represents a source distribution. + + The preparation step for these needs metadata for the packages to be + generated, either using PEP 517 or using the legacy `setup.py egg_info`. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + return self.req.get_dist() + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + # Load pyproject.toml, to determine whether PEP 517 is to be used + self.req.load_pyproject_toml() + + # Set up the build isolation, if this requirement should be isolated + should_isolate = self.req.use_pep517 and build_isolation + if should_isolate: + # Setup an isolated environment and install the build backend static + # requirements in it. + self._prepare_build_backend(finder) + # Check that if the requirement is editable, it either supports PEP 660 or + # has a setup.py or a setup.cfg. This cannot be done earlier because we need + # to setup the build backend to verify it supports build_editable, nor can + # it be done later, because we want to avoid installing build requirements + # needlessly. Doing it here also works around setuptools generating + # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory + # without setup.py nor setup.cfg. + self.req.isolated_editable_sanity_check() + # Install the dynamic build requirements. 
+ self._install_build_reqs(finder) + + self.req.prepare_metadata() + + def _prepare_build_backend(self, finder: PackageFinder) -> None: + # Isolate in a BuildEnvironment and install the build-time + # requirements. + pyproject_requires = self.req.pyproject_requires + assert pyproject_requires is not None + + self.req.build_env = BuildEnvironment() + self.req.build_env.install_requirements( + finder, pyproject_requires, "overlay", kind="build dependencies" + ) + conflicting, missing = self.req.build_env.check_requirements( + self.req.requirements_to_check + ) + if conflicting: + self._raise_conflicts("PEP 517/518 supported requirements", conflicting) + if missing: + logger.warning( + "Missing build requirements in pyproject.toml for %s.", + self.req, + ) + logger.warning( + "The project does not specify a build backend, and " + "pip cannot fall back to setuptools without %s.", + " and ".join(map(repr, sorted(missing))), + ) + + def _get_build_requires_wheel(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message("Getting requirements to build wheel") + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_wheel() + + def _get_build_requires_editable(self) -> Iterable[str]: + with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build editable" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + return backend.get_requires_for_build_editable() + + def _install_build_reqs(self, finder: PackageFinder) -> None: + # Install any extra build dependencies that the backend requests. + # This must be done in a second pass, as the pyproject.toml + # dependencies must be installed before we can call the backend. + if ( + self.req.editable + and self.req.permit_editable_wheels + and self.req.supports_pyproject_editable() + ): + build_reqs = self._get_build_requires_editable() + else: + build_reqs = self._get_build_requires_wheel() + conflicting, missing = self.req.build_env.check_requirements(build_reqs) + if conflicting: + self._raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, "normal", kind="backend dependencies" + ) + + def _raise_conflicts( + self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]] + ) -> None: + format_string = ( + "Some build dependencies for {requirement} " + "conflict with {conflicting_with}: {description}." + ) + error_message = format_string.format( + requirement=self.req, + conflicting_with=conflicting_with, + description=", ".join( + f"{installed} is incompatible with {wanted}" + for installed, wanted in sorted(conflicting_reqs) + ), + ) + raise InstallationError(error_message) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 000000000..340b0f3c5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,31 @@ +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. 
+ + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_metadata_distribution(self) -> BaseDistribution: + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + assert self.req.local_file_path, "Set as part of preparation during download" + assert self.req.name, "Wheels are never unnamed" + wheel = FilesystemWheel(self.req.local_file_path) + return get_wheel_distribution(wheel, canonicalize_name(self.req.name)) + + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: + pass diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/exceptions.py b/myenv/lib/python3.10/site-packages/pip/_internal/exceptions.py new file mode 100644 index 000000000..97b9612a1 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,658 @@ +"""Exceptions used throughout package. + +This module MUST NOT try to import from anything within `pip._internal` to +operate. This is expected to be importable from any/all files within the +subpackage and, thus, should not depend on them. +""" + +import configparser +import re +from itertools import chain, groupby, repeat +from typing import TYPE_CHECKING, Dict, List, Optional, Union + +from pip._vendor.requests.models import Request, Response +from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult +from pip._vendor.rich.markup import escape +from pip._vendor.rich.text import Text + +if TYPE_CHECKING: + from hashlib import _Hash + from typing import Literal + + from pip._internal.metadata import BaseDistribution + from pip._internal.req.req_install import InstallRequirement + + +# +# Scaffolding +# +def _is_kebab_case(s: str) -> bool: + return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None + + +def _prefix_with_indent( + s: Union[Text, str], + console: Console, + *, + prefix: str, + indent: str, +) -> Text: + if isinstance(s, Text): + text = s + else: + text = console.render_str(s) + + return console.render_str(prefix, overflow="ignore") + console.render_str( + f"\n{indent}", overflow="ignore" + ).join(text.split(allow_blank=True)) + + +class PipError(Exception): + """The base pip error.""" + + +class DiagnosticPipError(PipError): + """An error, that presents diagnostic information to the user. + + This contains a bunch of logic, to enable pretty presentation of our error + messages. Each error gets a unique reference. Each error can also include + additional context, a hint and/or a note -- which are presented with the + main error message in a consistent style. + + This is adapted from the error output styling in `sphinx-theme-builder`. + """ + + reference: str + + def __init__( + self, + *, + kind: 'Literal["error", "warning"]' = "error", + reference: Optional[str] = None, + message: Union[str, Text], + context: Optional[Union[str, Text]], + hint_stmt: Optional[Union[str, Text]], + note_stmt: Optional[Union[str, Text]] = None, + link: Optional[str] = None, + ) -> None: + # Ensure a proper reference is provided. + if reference is None: + assert hasattr(self, "reference"), "error reference not provided!" + reference = self.reference + assert _is_kebab_case(reference), "error reference must be kebab-case!" 
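+        # A reference is a short kebab-case slug identifying the error kind,
+        # e.g. "subprocess-exited-with-error" (see the subclasses below).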
+
+        self.kind = kind
+        self.reference = reference
+
+        self.message = message
+        self.context = context
+
+        self.note_stmt = note_stmt
+        self.hint_stmt = hint_stmt
+
+        self.link = link
+
+        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}("
+            f"reference={self.reference!r}, "
+            f"message={self.message!r}, "
+            f"context={self.context!r}, "
+            f"note_stmt={self.note_stmt!r}, "
+            f"hint_stmt={self.hint_stmt!r}"
+            ")>"
+        )
+
+    def __rich_console__(
+        self,
+        console: Console,
+        options: ConsoleOptions,
+    ) -> RenderResult:
+        colour = "red" if self.kind == "error" else "yellow"
+
+        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+        yield ""
+
+        if not options.ascii_only:
+            # Present the main message, with relevant context indented.
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
+
+
+#
+# Actual Errors
+#
+class ConfigurationError(PipError):
+    """General exception in configuration"""
+
+
+class InstallationError(PipError):
+    """General exception during installation"""
+
+
+class UninstallationError(PipError):
+    """General exception during uninstallation"""
+
+
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class NoneMetadataError(PipError):
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
+    """
+
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+        """
+        self.dist = dist
+        self.metadata_name = metadata_name
+
+    def __str__(self) -> str:
+        # Use `dist` in the error message because its stringification
+        # includes more information, like the version and location.
+        return "None {} metadata found for distribution: {}".format(
+            self.metadata_name,
+            self.dist,
+        )
+
+
+class UserInstallationInvalid(InstallationError):
+    """A --user install is requested on an environment without user site."""
+
+    def __str__(self) -> str:
+        return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+    def __str__(self) -> str:
+        before = ", ".join(str(a) for a in self.args[:-1])
+        return f"Cannot set {before} and {self.args[-1]} together"
+
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+    """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+    """Raised when the most up-to-date version of a package is already
+    installed."""
+
+
+class BadCommand(PipError):
+    """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+    """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+    """Raised when there's a previous conflicting build directory"""
+
+
+class NetworkConnectionError(PipError):
+    """HTTP connection error"""
+
+    def __init__(
+        self,
+        error_msg: str,
+        response: Optional[Response] = None,
+        request: Optional[Request] = None,
+    ) -> None:
+        """
+        Initialize NetworkConnectionError with `request` and `response`
+        objects.
+        """
+        self.response = response
+        self.request = request
+        self.error_msg = error_msg
+        if (
+            self.response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
+            self.request = self.response.request
+        super().__init__(error_msg, response, request)
+
+    def __str__(self) -> str:
+        return str(self.error_msg)
+
+
+class InvalidWheelFilename(InstallationError):
+    """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+    """Unsupported wheel."""
+
+
+class InvalidWheel(InstallationError):
+    """Invalid (e.g. corrupt) wheel."""
+
+    def __init__(self, location: str, name: str):
+        self.location = location
+        self.name = name
+
+    def __str__(self) -> str:
+        return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
+class MetadataInconsistent(InstallationError):
+    """Built metadata contains inconsistent information.
+
+    This is raised when the metadata contains values (e.g. name and version)
+    that do not match the information previously obtained from the sdist
+    filename or the user-supplied ``#egg=`` value.
+ """ + + def __init__( + self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str + ) -> None: + self.ireq = ireq + self.field = field + self.f_val = f_val + self.m_val = m_val + + def __str__(self) -> str: + template = ( + "Requested {} has inconsistent {}: " + "filename has {!r}, but metadata has {!r}" + ) + return template.format(self.ireq, self.field, self.f_val, self.m_val) + + +class LegacyInstallFailure(DiagnosticPipError): + """Error occurred while executing `setup.py install`""" + + reference = "legacy-install-failure" + + def __init__(self, package_details: str) -> None: + super().__init__( + message="Encountered error while trying to install package.", + context=package_details, + hint_stmt="See above for output from the failure.", + note_stmt="This is an issue with the package mentioned above, not pip.", + ) + + +class InstallationSubprocessError(DiagnosticPipError, InstallationError): + """A subprocess call failed.""" + + reference = "subprocess-exited-with-error" + + def __init__( + self, + *, + command_description: str, + exit_code: int, + output_lines: Optional[List[str]], + ) -> None: + if output_lines is None: + output_prompt = Text("See above for output.") + else: + output_prompt = ( + Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n") + + Text("".join(output_lines)) + + Text.from_markup(R"[red]\[end of output][/]") + ) + + super().__init__( + message=( + f"[green]{escape(command_description)}[/] did not run successfully.\n" + f"exit code: {exit_code}" + ), + context=output_prompt, + hint_stmt=None, + note_stmt=( + "This error originates from a subprocess, and is likely not a " + "problem with pip." + ), + ) + + self.command_description = command_description + self.exit_code = exit_code + + def __str__(self) -> str: + return f"{self.command_description} exited with {self.exit_code}" + + +class MetadataGenerationFailed(InstallationSubprocessError, InstallationError): + reference = "metadata-generation-failed" + + def __init__( + self, + *, + package_details: str, + ) -> None: + super(InstallationSubprocessError, self).__init__( + message="Encountered error while generating package metadata.", + context=escape(package_details), + hint_stmt="See above for details.", + note_stmt="This is an issue with the package mentioned above, not pip.", + ) + + def __str__(self) -> str: + return "metadata generation failed" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self) -> None: + self.errors: List["HashError"] = [] + + def append(self, error: "HashError") -> None: + self.errors.append(error) + + def __str__(self) -> str: + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return "\n".join(lines) + return "" + + def __bool__(self) -> bool: + return bool(self.errors) + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. 
This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + + req: Optional["InstallRequirement"] = None + head = "" + order: int = -1 + + def body(self) -> str: + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + its link already populated by the resolver's _populate_link(). + + """ + return f" {self._requirement_name()}" + + def __str__(self) -> str: + return f"{self.head}\n{self.body()}" + + def _requirement_name(self) -> str: + """Return a description of the requirement that triggered me. + + This default implementation returns the long description of the req, + with line numbers. + + """ + return str(self.req) if self.req else "unknown package" + + + class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ( + "Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:" + ) + + + class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a file:// requirement that points to a + directory, but we don't have a method for hashing those.""" + + order = 1 + head = ( + "Can't verify hashes for these file:// requirements because they " + "point to directories:" + ) + + + class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ( + "Hashes are required in --require-hashes mode, but they are " + "missing from some requirements. Here is a list of those " + "requirements along with the hashes their downloaded archives " + "actually had. Add lines like these to your requirements files to " + "prevent tampering. (If you did not enable --require-hashes " + "manually, note that it turns on automatically when any package " + "has a hash.)" + ) + + def __init__(self, gotten_hash: str) -> None: + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self) -> str: + # Dodge circular import. + from pip._internal.utils.hashes import FAVORITE_HASH + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = ( + self.req.original_link + if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. + else getattr(self.req, "req", None) + ) + return " {} --hash={}:{}".format( + package or "unknown package", FAVORITE_HASH, self.gotten_hash + ) + + + class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ( + "In --require-hashes mode, all requirements must have their " + "versions pinned with ==. These do not:" + ) + + + class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raised to + improve its error message. + + """ + + order = 4 + head = ( + "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS " + "FILE.
If you have updated the package versions, please update " + "the hashes. Otherwise, examine the package contents carefully; " + "someone may have tampered with them." + ) + + def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None: + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self) -> str: + return " {}:\n{}".format(self._requirement_name(), self._hash_comparison()) + + def _hash_comparison(self) -> str: + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + + def hash_then_or(hash_name: str) -> "chain[str]": + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(" or")) + + lines: List[str] = [] + for hash_name, expecteds in self.allowed.items(): + prefix = hash_then_or(hash_name) + lines.extend( + (" Expected {} {}".format(next(prefix), e)) for e in expecteds + ) + lines.append( + " Got {}\n".format(self.gots[hash_name].hexdigest()) + ) + return "\n".join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file""" + + def __init__( + self, + reason: str = "could not be loaded", + fname: Optional[str] = None, + error: Optional[configparser.Error] = None, + ) -> None: + super().__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self) -> str: + if self.fname is not None: + message_part = f" in {self.fname}." 
+ else: + assert self.error is not None + message_part = f".\n{self.error}\n" + return f"Configuration file {self.reason}{message_part}" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 000000000..7a17b7b3b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..5264a4122 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc new file mode 100644 index 000000000..8f6bc8f6f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc new file mode 100644 index 000000000..f9d4c6747 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc new file mode 100644 index 000000000..acf96a3ce Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/collector.py b/myenv/lib/python3.10/site-packages/pip/_internal/index/collector.py new file mode 100644 index 000000000..4ecbb3378 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,648 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_sources(). 
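+
+A rough usage sketch (``session``, ``options``, and ``candidates_from_page``
+are assumed to be a configured PipSession, parsed CLI options, and a
+callback as defined in sources.py, respectively)::
+
+    collector = LinkCollector.create(session, options=options)
+    sources = collector.collect_sources(
+        project_name="sampleproject",  # hypothetical project
+        candidates_from_page=candidates_from_page,
+    )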
+""" + +import cgi +import collections +import functools +import itertools +import logging +import os +import re +import urllib.parse +import urllib.request +import xml.etree.ElementTree +from html.parser import HTMLParser +from optparse import Values +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + MutableMapping, + NamedTuple, + Optional, + Sequence, + Tuple, + Union, +) + +from pip._vendor import html5lib, requests +from pip._vendor.requests import Response +from pip._vendor.requests.exceptions import RetryError, SSLError + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.vcs import vcs + +from .sources import CandidatesFromPage, LinkSource, build_source + +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + +logger = logging.getLogger(__name__) + +HTMLElement = xml.etree.ElementTree.Element +ResponseHeaders = MutableMapping[str, str] + + +def _match_vcs_scheme(url: str) -> Optional[str]: + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in "+:": + return scheme + return None + + +class _NotHTML(Exception): + def __init__(self, content_type: str, request_desc: str) -> None: + super().__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response: Response) -> None: + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. + """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url: str, session: PipSession) -> None: + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. + """ + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) + if scheme not in {"http", "https"}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + raise_for_status(resp) + + _ensure_html_header(resp) + + +def _get_html_response(url: str, session: PipSession) -> Response: + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. 
+ """ + if is_archive_file(Link(url).filename): + _ensure_html_response(url, session=session) + + logger.debug("Getting page %s", redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + raise_for_status(resp) + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]: + """Determine if we have any encoding information in our headers.""" + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params["charset"] + return None + + +def _determine_base_url(document: HTMLElement, page_url: str) -> str: + """Determine the HTML document's base URL. + + This looks for a ```` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. + + TODO: Remove when `html5lib` is dropped. + """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_url_path_part(part: str) -> str: + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part: str) -> str: + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib.request.pathname2url(urllib.request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) + + +def _clean_url_path(path: str, is_local_path: bool) -> str: + """ + Clean the path portion of a URL. 
+ """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [""])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return "".join(cleaned_parts) + + +def _clean_link(url: str) -> str: + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib.parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. + is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib.parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + element_attribs: Dict[str, Optional[str]], + page_url: str, + base_url: str, +) -> Optional[Link]: + """ + Convert an anchor element's attributes in a simple repository page to a Link. + """ + href = element_attribs.get("href") + if not href: + return None + + url = _clean_link(urllib.parse.urljoin(base_url, href)) + pyrequire = element_attribs.get("data-requires-python") + yanked_reason = element_attribs.get("data-yanked") + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +class CacheablePageContent: + def __init__(self, page: "HTMLPage") -> None: + assert page.cache_link_parsing + self.page = page + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) and self.page.url == other.page.url + + def __hash__(self) -> int: + return hash(self.page.url) + + +class ParseLinks(Protocol): + def __call__( + self, page: "HTMLPage", use_deprecated_html5lib: bool + ) -> Iterable[Link]: + ... + + +def with_cached_html_pages(fn: ParseLinks) -> ParseLinks: + """ + Given a function that parses an Iterable[Link] from an HTMLPage, cache the + function's result (keyed by CacheablePageContent), unless the HTMLPage + `page` has `page.cache_link_parsing == False`. + """ + + @functools.lru_cache(maxsize=None) + def wrapper( + cacheable_page: CacheablePageContent, use_deprecated_html5lib: bool + ) -> List[Link]: + return list(fn(cacheable_page.page, use_deprecated_html5lib)) + + @functools.wraps(fn) + def wrapper_wrapper(page: "HTMLPage", use_deprecated_html5lib: bool) -> List[Link]: + if page.cache_link_parsing: + return wrapper(CacheablePageContent(page), use_deprecated_html5lib) + return list(fn(page, use_deprecated_html5lib)) + + return wrapper_wrapper + + +def _parse_links_html5lib(page: "HTMLPage") -> Iterable[Link]: + """ + Parse an HTML document, and yield its anchor elements as Link objects. + + TODO: Remove when `html5lib` is dropped. 
+ """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor.attrib, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +@with_cached_html_pages +def parse_links(page: "HTMLPage", use_deprecated_html5lib: bool) -> Iterable[Link]: + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + encoding = page.encoding or "utf-8" + + # Check if the page starts with a valid doctype, to decide whether to use + # http.parser or (deprecated) html5lib for parsing -- unless explicitly + # requested to use html5lib. + if not use_deprecated_html5lib: + expected_doctype = "".encode(encoding) + actual_start = page.content[: len(expected_doctype)] + if actual_start.decode(encoding).lower() != "": + deprecated( + reason=( + f"The HTML index page being used ({page.url}) is not a proper " + "HTML 5 document. This is in violation of PEP 503 which requires " + "these pages to be well-formed HTML 5 documents. Please reach out " + "to the owners of this index page, and ask them to update this " + "index page to a valid HTML 5 document." + ), + replacement=None, + gone_in="22.2", + issue=10825, + ) + use_deprecated_html5lib = True + + if use_deprecated_html5lib: + yield from _parse_links_html5lib(page) + return + + parser = HTMLLinkParser() + parser.feed(page.content.decode(encoding)) + + url = page.url + base_url = parser.base_url or url + for anchor in parser.anchors: + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage: + """Represents one page, along with its URL""" + + def __init__( + self, + content: bytes, + encoding: Optional[str], + url: str, + cache_link_parsing: bool = True, + ) -> None: + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + :param cache_link_parsing: whether links parsed from this page's url + should be cached. PyPI index urls should + have this set to False, for example. + """ + self.content = content + self.encoding = encoding + self.url = url + self.cache_link_parsing = cache_link_parsing + + def __str__(self) -> str: + return redact_auth_from_url(self.url) + + +class HTMLLinkParser(HTMLParser): + """ + HTMLParser that keeps the first base HREF and a list of all anchor + elements' attributes. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._seen_decl = False + self.base_url: Optional[str] = None + self.anchors: List[Dict[str, Optional[str]]] = [] + + def handle_decl(self, decl: str) -> None: + if decl.lower() != "doctype html": + self._raise_error() + self._seen_decl = True + + def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: + if not self._seen_decl: + self._raise_error() + + if tag == "base" and self.base_url is None: + href = self.get_href(attrs) + if href is not None: + self.base_url = href + elif tag == "a": + self.anchors.append(dict(attrs)) + + def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]: + for name, value in attrs: + if name == "href": + return value + return None + + def _raise_error(self) -> None: + raise ValueError( + "HTML doctype missing or incorrect. 
Expected <!DOCTYPE html>.\n\n" + "If you believe this error to be incorrect, try passing the " + "command line option --use-deprecated=html5lib and please leave " + "a comment on the pip issue at https://github.com/pypa/pip/issues/10825." + ) + + + def _handle_get_page_fail( + link: Link, + reason: Union[str, Exception], + meth: Optional[Callable[..., None]] = None, + ) -> None: + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + + def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage: + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage( + response.content, + encoding=encoding, + url=response.url, + cache_link_parsing=cache_link_parsing, + ) + + + def _get_html_page( + link: Link, session: Optional[PipSession] = None + ) -> Optional["HTMLPage"]: + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split("#", 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.warning( + "Cannot look at %s URL %s because it does not support lookup as web pages.", + vcs_scheme, + link, + ) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib.parse.urlparse(url) + if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith("/"): + url += "/" + url = urllib.parse.urljoin(url, "index.html") + logger.debug(" file: URL is directory, getting %s", url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.warning( + "Skipping page %s because it looks like an archive, and cannot " + "be checked by an HTTP HEAD request.", + link, + ) + except _NotHTML as exc: + logger.warning( + "Skipping page %s because the %s request got Content-Type: %s. " + "The only supported Content-Type is text/html", + link, + exc.request_desc, + exc.content_type, + ) + except NetworkConnectionError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, f"connection error: {exc}") + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing) + return None + + + class CollectedSources(NamedTuple): + find_links: Sequence[Optional[LinkSource]] + index_urls: Sequence[Optional[LinkSource]] + + + class LinkCollector: + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_sources() method. + """ + + def __init__( + self, + session: PipSession, + search_scope: SearchScope, + ) -> None: + self.search_scope = search_scope + self.session = session + + @classmethod + def create( + cls, + session: PipSession, + options: Values, + suppress_no_index: bool = False, + ) -> "LinkCollector": + """ + :param session: The Session to use to make requests. + :param suppress_no_index: Whether to ignore the --no-index option + when constructing the SearchScope object.
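+
+        Illustrative call (``session`` and ``options`` assumed available)::
+
+            link_collector = LinkCollector.create(session, options=options)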
+ """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index and not suppress_no_index: + logger.debug( + "Ignoring indexes: %s", + ",".join(redact_auth_from_url(url) for url in index_urls), + ) + index_urls = [] + + # Make sure find_links is a list before passing to create(). + find_links = options.find_links or [] + + search_scope = SearchScope.create( + find_links=find_links, + index_urls=index_urls, + ) + link_collector = LinkCollector( + session=session, + search_scope=search_scope, + ) + return link_collector + + @property + def find_links(self) -> List[str]: + return self.search_scope.find_links + + def fetch_page(self, location: Link) -> Optional[HTMLPage]: + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_sources( + self, + project_name: str, + candidates_from_page: CandidatesFromPage, + ) -> CollectedSources: + # The OrderedDict calls deduplicate sources by URL. + index_url_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=False, + cache_link_parsing=False, + ) + for loc in self.search_scope.get_index_urls_locations(project_name) + ).values() + find_links_sources = collections.OrderedDict( + build_source( + loc, + candidates_from_page=candidates_from_page, + page_validator=self.session.is_secure_origin, + expand_dir=True, + cache_link_parsing=True, + ) + for loc in self.find_links + ).values() + + if logger.isEnabledFor(logging.DEBUG): + lines = [ + f"* {s.link}" + for s in itertools.chain(find_links_sources, index_url_sources) + if s is not None and s.link is not None + ] + lines = [ + f"{len(lines)} location(s) to search " + f"for versions of {project_name}:" + ] + lines + logger.debug("\n".join(lines)) + + return CollectedSources( + find_links=list(find_links_sources), + index_urls=list(index_url_sources), + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/package_finder.py b/myenv/lib/python3.10/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 000000000..223d06df6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1004 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import functools +import itertools +import logging +import re +from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import _BaseVersion +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import LinkCollector, parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.search_scope import SearchScope +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.req import InstallRequirement +from pip._internal.utils._log import getLogger +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS + +__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] + + +logger = getLogger(__name__) + +BuildTag = Union[Tuple[()], Tuple[int, str]] +CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag] + + +def _check_link_requires_python( + link: Link, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> bool: + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, + link, + ) + else: + if not is_compatible: + version = ".".join(map(str, version_info)) + if not ignore_requires_python: + logger.verbose( + "Link requires a different Python (%s not in: %r): %s", + version, + link.requires_python, + link, + ) + return False + + logger.debug( + "Ignoring failed Requires-Python check (%s not in: %r) for link: %s", + version, + link.requires_python, + link, + ) + + return True + + +class LinkEvaluator: + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$") + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + project_name: str, + canonical_name: str, + formats: FrozenSet[str], + target_python: TargetPython, + allow_yanked: bool, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + :param project_name: The user supplied package name. 
+ :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or "" + return (False, f"yanked for reason: {reason}") + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, "not a file") + if ext not in SUPPORTED_EXTENSIONS: + return (False, f"unsupported archive format: {ext}") + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = "No binaries permitted for {}".format(self.project_name) + return (False, reason) + if "macosx10" in link.path and ext == ".zip": + return (False, "macosx10 one") + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, "invalid wheel filename") + if canonicalize_name(wheel.name) != self._canonical_name: + reason = "wrong project name (not {})".format(self.project_name) + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags ({}) are compatible " + "(run pip debug --verbose to show compatible tags)".format( + ", ".join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + reason = f"No sources permitted for {self.project_name}" + return (False, reason) + + if not version: + version = _extract_version_from_fragment( + egg_info, + self._canonical_name, + ) + if not version: + reason = f"Missing project version for {self.project_name}" + return (False, reason) + + match = self._py_version_re.search(version) + if match: + version = version[: match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, "Python version is incorrect") + + supports_python = _check_link_requires_python( + link, + version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug("Found link %s, version: %s", link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates: List[InstallationCandidate], + hashes: Hashes, + project_name: str, +) -> List[InstallationCandidate]: + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + "Given no hashes to check %s links for project %r: " + "discarding no candidates", + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = "discarding no candidates" + else: + discard_message = "discarding {} non-matches:\n {}".format( + len(non_matches), + "\n ".join(str(candidate.link) for candidate in non_matches), + ) + + logger.debug( + "Checked %s links for project %r against %s hashes " + "(%s matches, %s no digest): %s", + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message, + ) + + return filtered + + +class CandidatePreferences: + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + ) -> None: + """ + :param allow_all_prereleases: Whether to allow all pre-releases. 
+ """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult: + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates: List[InstallationCandidate], + applicable_candidates: List[InstallationCandidate], + best_candidate: Optional[InstallationCandidate], + ) -> None: + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self) -> Iterable[InstallationCandidate]: + """Iterate through all candidates.""" + return iter(self._candidates) + + def iter_applicable(self) -> Iterable[InstallationCandidate]: + """Iterate through the applicable candidates.""" + return iter(self._applicable_candidates) + + +class CandidateEvaluator: + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name: str, + target_python: Optional[TargetPython] = None, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> "CandidateEvaluator": + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name: str, + supported_tags: List[Tag], + specifier: specifiers.BaseSpecifier, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + hashes: Optional[Hashes] = None, + ) -> None: + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + # Since the index of the tag in the _supported_tags list is used + # as a priority, precompute a map from tag to index/priority to be + # used in wheel.find_most_preferred_tag. 
+ self._wheel_tag_preferences = { + tag: idx for idx, tag in enumerate(supported_tags) + } + + def get_applicable_candidates( + self, + candidates: List[InstallationCandidate], + ) -> List[InstallationCandidate]: + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) + for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [c for c in candidates if str(c.version) in versions] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag: BuildTag = () + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + try: + pri = -( + wheel.find_most_preferred_tag( + valid_tags, self._wheel_tag_preferences + ) + ) + except ValueError: + raise UnsupportedWheel( + "{} is not a supported wheel for this platform. It " + "can't be sorted.".format(wheel.filename) + ) + if self._prefer_binary: + binary_preference = 1 + if wheel.build_tag is not None: + match = re.match(r"^(\d+)(.*)$", wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. 
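+        # Key fields, most significant first: allowed-hash match, not-yanked,
+        # binary preference, version, tag priority, and build tag.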
+ return ( + has_allowed_hash, + yank_value, + binary_preference, + candidate.version, + pri, + build_tag, + ) + + def sort_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> Optional[InstallationCandidate]: + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + best_candidate = max(candidates, key=self._sort_key) + return best_candidate + + def compute_best_candidate( + self, + candidates: List[InstallationCandidate], + ) -> BestCandidateResult: + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder: + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector: LinkCollector, + target_python: TargetPython, + allow_yanked: bool, + use_deprecated_html5lib: bool, + format_control: Optional[FormatControl] = None, + candidate_prefs: Optional[CandidatePreferences] = None, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + self._use_deprecated_html5lib = use_deprecated_html5lib + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links: Set[Link] = set() + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector: LinkCollector, + selection_prefs: SelectionPreferences, + target_python: Optional[TargetPython] = None, + *, + use_deprecated_html5lib: bool, + ) -> "PackageFinder": + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
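+
+        Illustrative call (``link_collector`` assumed to be built already)::
+
+            finder = PackageFinder.create(
+                link_collector=link_collector,
+                selection_prefs=SelectionPreferences(allow_yanked=False),
+                use_deprecated_html5lib=False,
+            )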
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + use_deprecated_html5lib=use_deprecated_html5lib, + ) + + @property + def target_python(self) -> TargetPython: + return self._target_python + + @property + def search_scope(self) -> SearchScope: + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope: SearchScope) -> None: + self._link_collector.search_scope = search_scope + + @property + def find_links(self) -> List[str]: + return self._link_collector.find_links + + @property + def index_urls(self) -> List[str]: + return self.search_scope.index_urls + + @property + def trusted_hosts(self) -> Iterable[str]: + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self) -> bool: + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self) -> None: + self._candidate_prefs.allow_all_prereleases = True + + @property + def prefer_binary(self) -> bool: + return self._candidate_prefs.prefer_binary + + def set_prefer_binary(self) -> None: + self._candidate_prefs.prefer_binary = True + + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links: Iterable[Link]) -> List[Link]: + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen: Set[Link] = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link: Link, reason: str) -> None: + if link not in self._logged_links: + # Put the link at the end so the reason is more visible and because + # the link string is usually very long. + logger.debug("Skipping link: %s: %s", reason, link) + self._logged_links.add(link) + + def get_install_candidate( + self, link_evaluator: LinkEvaluator, link: Link + ) -> Optional[InstallationCandidate]: + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. + """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + version=result, + ) + + def evaluate_links( + self, link_evaluator: LinkEvaluator, links: Iterable[Link] + ) -> List[InstallationCandidate]: + """ + Convert links that are candidates to InstallationCandidate objects. 
+ """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url( + self, project_url: Link, link_evaluator: LinkEvaluator + ) -> List[InstallationCandidate]: + logger.debug( + "Fetching project page and analyzing links: %s", + project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page, self._use_deprecated_html5lib)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + @functools.lru_cache(maxsize=None) + def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + link_evaluator = self.make_link_evaluator(project_name) + + collected_sources = self._link_collector.collect_sources( + project_name=project_name, + candidates_from_page=functools.partial( + self.process_project_url, + link_evaluator=link_evaluator, + ), + ) + + page_candidates_it = itertools.chain.from_iterable( + source.page_candidates() + for sources in collected_sources + for source in sources + if source is not None + ) + page_candidates = list(page_candidates_it) + + file_links_it = itertools.chain.from_iterable( + source.file_links() + for sources in collected_sources + for source in sources + if source is not None + ) + file_candidates = self.evaluate_links( + link_evaluator, + sorted(file_links_it, reverse=True), + ) + + if logger.isEnabledFor(logging.DEBUG) and file_candidates: + paths = [] + for candidate in file_candidates: + assert candidate.link.url # we need to have a URL + try: + paths.append(candidate.link.file_path) + except Exception: + paths.append(candidate.link.url) # it's not a local file + + logger.debug("Local files found: %s", ", ".join(paths)) + + # This is an intentional priority ordering + return file_candidates + page_candidates + + def make_candidate_evaluator( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> CandidateEvaluator: + """Create a CandidateEvaluator object to use.""" + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + @functools.lru_cache(maxsize=None) + def find_best_candidate( + self, + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> BestCandidateResult: + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement( + self, req: InstallRequirement, upgrade: bool + ) -> Optional[InstallationCandidate]: + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a InstallationCandidate if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, + specifier=req.specifier, + hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version: Optional[_BaseVersion] = None + if req.satisfied_by is not None: + installed_version = req.satisfied_by.version + + def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ( + ", ".join( + sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + ) + ) + or "none" + ) + + if installed_version is None and best_candidate is None: + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + "No matching distribution found for {}".format(req) + ) + + best_installed = False + if installed_version and ( + best_candidate is None or best_candidate.version <= installed_version + ): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + "Existing installed version (%s) is most up-to-date and " + "satisfies requirement", + installed_version, + ) + else: + logger.debug( + "Existing installed version (%s) satisfies requirement " + "(most up-to-date version is %s)", + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + "Installed version (%s) is most up-to-date (past versions: %s)", + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + "Using version %s (newest of versions: %s)", + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate + + +def _find_name_version_sep(fragment: str, canonical_name: str) -> int: + """Find the separator's index based on the package's canonical name. + + :param fragment: A + filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. 
+ for i, c in enumerate(fragment):
+ if c != "-":
+ continue
+ if canonicalize_name(fragment[:i]) == canonical_name:
+ return i
+ raise ValueError(f"{fragment} does not match {canonical_name}")
+
+
+def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
+ """Parse the version string from a filename "fragment" (stem) or egg fragment.
+
+ :param fragment: The string to parse. E.g. foo-2.1
+ :param canonical_name: The canonicalized name of the package this
+ belongs to.
+ """
+ try:
+ version_start = _find_name_version_sep(fragment, canonical_name) + 1
+ except ValueError:
+ return None
+ version = fragment[version_start:]
+ if not version:
+ return None
+ return version
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/index/sources.py b/myenv/lib/python3.10/site-packages/pip/_internal/index/sources.py
new file mode 100644
index 000000000..eec3f12f7
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/index/sources.py
@@ -0,0 +1,224 @@
+import logging
+import mimetypes
+import os
+import pathlib
+from typing import Callable, Iterable, Optional, Tuple
+
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.link import Link
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url
+
+logger = logging.getLogger(__name__)
+
+FoundCandidates = Iterable[InstallationCandidate]
+FoundLinks = Iterable[Link]
+CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
+PageValidator = Callable[[Link], bool]
+
+
+class LinkSource:
+ @property
+ def link(self) -> Optional[Link]:
+ """Returns the underlying link, if there's one."""
+ raise NotImplementedError()
+
+ def page_candidates(self) -> FoundCandidates:
+ """Candidates found by parsing an archive listing HTML file."""
+ raise NotImplementedError()
+
+ def file_links(self) -> FoundLinks:
+ """Links found by specifying archives directly."""
+ raise NotImplementedError()
+
+
+def _is_html_file(file_url: str) -> bool:
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
+
+
+class _FlatDirectorySource(LinkSource):
+ """Link source specified by ``--find-links=``.
+
+ This looks at the content of the directory, and returns:
+
+ * ``page_candidates``: Links listed on each HTML file in the directory.
+ * ``file_candidates``: Archives in the directory.
+ """
+
+ def __init__(
+ self,
+ candidates_from_page: CandidatesFromPage,
+ path: str,
+ ) -> None:
+ self._candidates_from_page = candidates_from_page
+ self._path = pathlib.Path(os.path.realpath(path))
+
+ @property
+ def link(self) -> Optional[Link]:
+ return None
+
+ def page_candidates(self) -> FoundCandidates:
+ for path in self._path.iterdir():
+ url = path_to_url(str(path))
+ if not _is_html_file(url):
+ continue
+ yield from self._candidates_from_page(Link(url))
+
+ def file_links(self) -> FoundLinks:
+ for path in self._path.iterdir():
+ url = path_to_url(str(path))
+ if _is_html_file(url):
+ continue
+ yield Link(url)
+
+
+class _LocalFileSource(LinkSource):
+ """``--find-links=`` or ``--[extra-]index-url=``.
+
+ If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
+ the option, it is converted to a URL first. This returns:
+
+ * ``page_candidates``: Links listed on an HTML file.
+ * ``file_candidates``: The non-HTML file.
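+
+ Example (illustrative; ``_is_html_file`` is the module-level helper
+ defined above, and the paths are hypothetical)::
+
+ _is_html_file("file:///srv/links/index.html") # True -> page_candidates
+ _is_html_file("file:///srv/links/pkg-1.0.tar.gz") # False -> file_links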
+ """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not _is_html_file(self._link.url): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + if _is_html_file(self._link.url): + return + yield self._link + + +class _RemoteFileSource(LinkSource): + """``--find-links=`` or ``--[extra-]index-url=``. + + This returns: + + * ``page_candidates``: Links listed on an HTML file. + * ``file_candidates``: The non-HTML file. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._page_validator = page_validator + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + if not self._page_validator(self._link): + return + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + yield self._link + + +class _IndexDirectorySource(LinkSource): + """``--[extra-]index-url=``. + + This is treated like a remote URL; ``candidates_from_page`` contains logic + for this by appending ``index.html`` to the link. + """ + + def __init__( + self, + candidates_from_page: CandidatesFromPage, + link: Link, + ) -> None: + self._candidates_from_page = candidates_from_page + self._link = link + + @property + def link(self) -> Optional[Link]: + return self._link + + def page_candidates(self) -> FoundCandidates: + yield from self._candidates_from_page(self._link) + + def file_links(self) -> FoundLinks: + return () + + +def build_source( + location: str, + *, + candidates_from_page: CandidatesFromPage, + page_validator: PageValidator, + expand_dir: bool, + cache_link_parsing: bool, +) -> Tuple[Optional[str], Optional[LinkSource]]: + + path: Optional[str] = None + url: Optional[str] = None + if os.path.exists(location): # Is a local path. + url = path_to_url(location) + path = location + elif location.startswith("file:"): # A file: URL. + url = location + path = url_to_path(location) + elif is_url(location): + url = location + + if url is None: + msg = ( + "Location '%s' is ignored: " + "it is either a non-existing path or lacks a specific scheme." 
+ )
+ logger.warning(msg, location)
+ return (None, None)
+
+ if path is None:
+ source: LinkSource = _RemoteFileSource(
+ candidates_from_page=candidates_from_page,
+ page_validator=page_validator,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+
+ if os.path.isdir(path):
+ if expand_dir:
+ source = _FlatDirectorySource(
+ candidates_from_page=candidates_from_page,
+ path=path,
+ )
+ else:
+ source = _IndexDirectorySource(
+ candidates_from_page=candidates_from_page,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+ elif os.path.isfile(path):
+ source = _LocalFileSource(
+ candidates_from_page=candidates_from_page,
+ link=Link(url, cache_link_parsing=cache_link_parsing),
+ )
+ return (url, source)
+ logger.warning(
+ "Location '%s' is ignored: it is neither a file nor a directory.",
+ location,
+ )
+ return (url, None)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__init__.py
new file mode 100644
index 000000000..ac0c166e5
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__init__.py
@@ -0,0 +1,520 @@
+import functools
+import logging
+import os
+import pathlib
+import sys
+import sysconfig
+from typing import Any, Dict, Iterator, List, Optional, Tuple
+
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from . import _distutils, _sysconfig
+from .base import (
+ USER_CACHE_DIR,
+ get_major_minor_version,
+ get_src_prefix,
+ is_osx_framework,
+ site_packages,
+ user_site,
+)
+
+__all__ = [
+ "USER_CACHE_DIR",
+ "get_bin_prefix",
+ "get_bin_user",
+ "get_major_minor_version",
+ "get_platlib",
+ "get_prefixed_libs",
+ "get_purelib",
+ "get_scheme",
+ "get_src_prefix",
+ "site_packages",
+ "user_site",
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
+
+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
+
+
+def _should_use_sysconfig() -> bool:
+ """This function determines the value of _USE_SYSCONFIG.
+
+ By default, pip uses sysconfig on Python 3.10+.
+ But Python distributors can override this decision by setting:
+ sysconfig._PIP_USE_SYSCONFIG = True / False
+ Rationale in https://github.com/pypa/pip/issues/10647
+
+ This is a function for testability, but should be constant during any one
+ run.
+ """
+ return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+# Be noisy about incompatibilities if this platform "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+ _MISMATCH_LEVEL = logging.WARNING
+else:
+ _MISMATCH_LEVEL = logging.DEBUG
+
+
+def _looks_like_bpo_44860() -> bool:
+ """The resolution to bpo-44860 will change this incorrect platlib.
+
+ See <https://bugs.python.org/issue44860>.
+ """ + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + try: + unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"] + except KeyError: + return False + return unix_user_platlib == "$usersite" + + +def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool: + platlib = scheme["platlib"] + if "/$platlibdir/" in platlib: + platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/") + if "/lib64/" not in platlib: + return False + unpatched = platlib.replace("/lib64/", "/lib/") + return unpatched.replace("$platbase/", "$base/") == scheme["purelib"] + + +@functools.lru_cache(maxsize=None) +def _looks_like_red_hat_lib() -> bool: + """Red Hat patches platlib in unix_prefix and unix_home, but not purelib. + + This is the only way I can see to tell a Red Hat-patched Python. + """ + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + return all( + k in INSTALL_SCHEMES + and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k]) + for k in ("unix_prefix", "unix_home") + ) + + +@functools.lru_cache(maxsize=None) +def _looks_like_debian_scheme() -> bool: + """Debian adds two additional schemes.""" + from distutils.command.install import INSTALL_SCHEMES # type: ignore + + return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES + + +@functools.lru_cache(maxsize=None) +def _looks_like_red_hat_scheme() -> bool: + """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``. + + Red Hat's ``00251-change-user-install-location.patch`` changes the install + command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is + (fortunately?) done quite unconditionally, so we create a default command + object without any configuration to detect this. + """ + from distutils.command.install import install + from distutils.dist import Distribution + + cmd: Any = install(Distribution()) + cmd.finalize_options() + return ( + cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local" + and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local" + ) + + +@functools.lru_cache(maxsize=None) +def _looks_like_slackware_scheme() -> bool: + """Slackware patches sysconfig but fails to patch distutils and site. + + Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib + path, but does not do the same to the site module. + """ + if user_site is None: # User-site not available. + return False + try: + paths = sysconfig.get_paths(scheme="posix_user", expand=False) + except KeyError: # User-site not available. + return False + return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site + + +@functools.lru_cache(maxsize=None) +def _looks_like_msys2_mingw_scheme() -> bool: + """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. + + However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is + likely going to be included in their 3.10 release, so we ignore the warning. + See msys2/MINGW-packages#9319. + + MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, + and is missing the final ``"site-packages"``. + """ + paths = sysconfig.get_paths("nt", expand=False) + return all( + "Lib" not in p and "lib" in p and not p.endswith("site-packages") + for p in (paths[key] for key in ("platlib", "purelib")) + ) + + +def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]: + ldversion = sysconfig.get_config_var("LDVERSION") + abiflags: str = getattr(sys, "abiflags", None) + + # LDVERSION does not end with sys.abiflags. Just return the path unchanged. 
+ if not ldversion or not abiflags or not ldversion.endswith(abiflags):
+ yield from parts
+ return
+
+ # Strip sys.abiflags from LDVERSION-based path components.
+ for part in parts:
+ if part.endswith(ldversion):
+ part = part[: (0 - len(abiflags))]
+ yield part
+
+
+@functools.lru_cache(maxsize=None)
+def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
+ issue_url = "https://github.com/pypa/pip/issues/10151"
+ message = (
+ "Value for %s does not match. Please report this to <%s>"
+ "\ndistutils: %s"
+ "\nsysconfig: %s"
+ )
+ logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+ if old == new:
+ return False
+ _warn_mismatched(old, new, key=key)
+ return True
+
+
+@functools.lru_cache(maxsize=None)
+def _log_context(
+ *,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ prefix: Optional[str] = None,
+) -> None:
+ parts = [
+ "Additional context:",
+ "user = %r",
+ "home = %r",
+ "root = %r",
+ "prefix = %r",
+ ]
+
+ logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
+
+
+def get_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: Optional[str] = None,
+ root: Optional[str] = None,
+ isolated: bool = False,
+ prefix: Optional[str] = None,
+) -> Scheme:
+ new = _sysconfig.get_scheme(
+ dist_name,
+ user=user,
+ home=home,
+ root=root,
+ isolated=isolated,
+ prefix=prefix,
+ )
+ if _USE_SYSCONFIG:
+ return new
+
+ old = _distutils.get_scheme(
+ dist_name,
+ user=user,
+ home=home,
+ root=root,
+ isolated=isolated,
+ prefix=prefix,
+ )
+
+ warning_contexts = []
+ for k in SCHEME_KEYS:
+ old_v = pathlib.Path(getattr(old, k))
+ new_v = pathlib.Path(getattr(new, k))
+
+ if old_v == new_v:
+ continue
+
+ # distutils incorrectly put PyPy packages under ``site-packages/python``
+ # in the ``posix_home`` scheme, but PyPy devs said they expect the
+ # directory name to be ``pypy`` instead. So we treat this as a bug fix
+ # and not warn about it. See bpo-43307 and python/cpython#24628.
+ skip_pypy_special_case = (
+ sys.implementation.name == "pypy"
+ and home is not None
+ and k in ("platlib", "purelib")
+ and old_v.parent == new_v.parent
+ and old_v.name.startswith("python")
+ and new_v.name.startswith("pypy")
+ )
+ if skip_pypy_special_case:
+ continue
+
+ # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
+ # the ``include`` value, but distutils's ``headers`` does. We'll let
+ # CPython decide whether this is a bug or feature. See bpo-43948.
+ skip_osx_framework_user_special_case = (
+ user
+ and is_osx_framework()
+ and k == "headers"
+ and old_v.parent.parent == new_v.parent
+ and old_v.parent.name.startswith("python")
+ )
+ if skip_osx_framework_user_special_case:
+ continue
+
+ # On Red Hat and derived Linux distributions, distutils is patched to
+ # use "lib64" instead of "lib" for platlib.
+ if k == "platlib" and _looks_like_red_hat_lib():
+ continue
+
+ # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
+ # sys.platlibdir, but distutils's unix_user incorrectly continues
+ # using the same $usersite for both platlib and purelib. This creates a
+ # mismatch when sys.platlibdir is not "lib".
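+ # Worked example (hypothetical): with sys.platlibdir == "lib64",
+ # sysconfig's posix_user platlib points at a lib64 path while the
+ # unpatched distutils unix_user platlib stays at $usersite, so the
+ # mismatch is expected and skipped here.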
+ skip_bpo_44860 = ( + user + and k == "platlib" + and not WINDOWS + and sys.version_info >= (3, 9) + and _PLATLIBDIR != "lib" + and _looks_like_bpo_44860() + ) + if skip_bpo_44860: + continue + + # Slackware incorrectly patches posix_user to use lib64 instead of lib, + # but not usersite to match the location. + skip_slackware_user_scheme = ( + user + and k in ("platlib", "purelib") + and not WINDOWS + and _looks_like_slackware_scheme() + ) + if skip_slackware_user_scheme: + continue + + # Both Debian and Red Hat patch Python to place the system site under + # /usr/local instead of /usr. Debian also places lib in dist-packages + # instead of site-packages, but the /usr/local check should cover it. + skip_linux_system_special_case = ( + not (user or home or prefix or running_under_virtualenv()) + and old_v.parts[1:3] == ("usr", "local") + and len(new_v.parts) > 1 + and new_v.parts[1] == "usr" + and (len(new_v.parts) < 3 or new_v.parts[2] != "local") + and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme()) + ) + if skip_linux_system_special_case: + continue + + # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in + # the "pythonX.Y" part of the path, but distutils does. + skip_sysconfig_abiflag_bug = ( + sys.version_info < (3, 8) + and not WINDOWS + and k in ("headers", "platlib", "purelib") + and tuple(_fix_abiflags(old_v.parts)) == new_v.parts + ) + if skip_sysconfig_abiflag_bug: + continue + + # MSYS2 MINGW's sysconfig patch does not include the "site-packages" + # part of the path. This is incorrect and will be fixed in MSYS. + skip_msys2_mingw_bug = ( + WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme() + ) + if skip_msys2_mingw_bug: + continue + + # CPython's POSIX install script invokes pip (via ensurepip) against the + # interpreter located in the source tree, not the install site. This + # triggers special logic in sysconfig that's not present in distutils. + # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194 + skip_cpython_build = ( + sysconfig.is_python_build(check_home=True) + and not WINDOWS + and k in ("headers", "include", "platinclude") + ) + if skip_cpython_build: + continue + + warning_contexts.append((old_v, new_v, f"scheme.{k}")) + + if not warning_contexts: + return old + + # Check if this path mismatch is caused by distutils config files. Those + # files will no longer work once we switch to sysconfig, so this raises a + # deprecation message for them. + default_old = _distutils.distutils_scheme( + dist_name, + user, + home, + root, + isolated, + prefix, + ignore_config_files=True, + ) + if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS): + deprecated( + reason=( + "Configuring installation scheme with distutils config files " + "is deprecated and will no longer work in the near future. If you " + "are using a Homebrew or Linuxbrew Python, please see discussion " + "at https://github.com/Homebrew/homebrew-core/issues/76621" + ), + replacement=None, + gone_in=None, + ) + return old + + # Post warnings about this mismatch so user can report them back. 
+ for old_v, new_v, key in warning_contexts: + _warn_mismatched(old_v, new_v, key=key) + _log_context(user=user, home=home, root=root, prefix=prefix) + + return old + + +def get_bin_prefix() -> str: + new = _sysconfig.get_bin_prefix() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_bin_prefix() + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): + _log_context() + return old + + +def get_bin_user() -> str: + return _sysconfig.get_scheme("", user=True).scripts + + +def _looks_like_deb_system_dist_packages(value: str) -> bool: + """Check if the value is Debian's APT-controlled dist-packages. + + Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the + default package path controlled by APT, but does not patch ``sysconfig`` to + do the same. This is similar to the bug worked around in ``get_scheme()``, + but here the default is ``deb_system`` instead of ``unix_local``. Ultimately + we can't do anything about this Debian bug, and this detection allows us to + skip the warning when needed. + """ + if not _looks_like_debian_scheme(): + return False + if value == "/usr/lib/python3/dist-packages": + return True + return False + + +def get_purelib() -> str: + """Return the default pure-Python lib location.""" + new = _sysconfig.get_purelib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_purelib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): + _log_context() + return old + + +def get_platlib() -> str: + """Return the default platform-shared lib location.""" + new = _sysconfig.get_platlib() + if _USE_SYSCONFIG: + return new + + old = _distutils.get_platlib() + if _looks_like_deb_system_dist_packages(old): + return old + if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): + _log_context() + return old + + +def _deduplicated(v1: str, v2: str) -> List[str]: + """Deduplicate values from a list.""" + if v1 == v2: + return [v1] + return [v1, v2] + + +def _looks_like_apple_library(path: str) -> bool: + """Apple patches sysconfig to *always* look under */Library/Python*.""" + if sys.platform[:6] != "darwin": + return False + return path == f"/Library/Python/{get_major_minor_version()}/site-packages" + + +def get_prefixed_libs(prefix: str) -> List[str]: + """Return the lib locations under ``prefix``.""" + new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) + if _USE_SYSCONFIG: + return _deduplicated(new_pure, new_plat) + + old_pure, old_plat = _distutils.get_prefixed_libs(prefix) + old_lib_paths = _deduplicated(old_pure, old_plat) + + # Apple's Python (shipped with Xcode and Command Line Tools) hard-code + # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will + # cause serious build isolation bugs when Apple starts shipping 3.10 because + # pip will install build backends to the wrong location. This tells users + # who is at fault so Apple may notice it and fix the issue in time. + if all(_looks_like_apple_library(p) for p in old_lib_paths): + deprecated( + reason=( + "Python distributed by Apple's Command Line Tools incorrectly " + "patches sysconfig to always point to '/Library/Python'. This " + "will cause build isolation to operate incorrectly on Python " + "3.10 or later. Please help report this to Apple so they can " + "fix this. 
https://developer.apple.com/bug-reporting/" + ), + replacement=None, + gone_in=None, + ) + return old_lib_paths + + warned = [ + _warn_if_mismatch( + pathlib.Path(old_pure), + pathlib.Path(new_pure), + key="prefixed-purelib", + ), + _warn_if_mismatch( + pathlib.Path(old_plat), + pathlib.Path(new_plat), + key="prefixed-platlib", + ), + ] + if any(warned): + _log_context(prefix=prefix) + + return old_lib_paths diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..f41ab8c25 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc new file mode 100644 index 000000000..56d78b16e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc new file mode 100644 index 000000000..08e1b5f71 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc new file mode 100644 index 000000000..71f60ee08 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py b/myenv/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py new file mode 100644 index 000000000..2ec79e65b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py @@ -0,0 +1,169 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +import logging +import os +import sys +from distutils.cmd import Command as DistutilsCommand +from distutils.command.install import SCHEME_KEYS +from distutils.command.install import install as distutils_install_command +from distutils.sysconfig import get_python_lib +from typing import Dict, List, Optional, Tuple, Union, cast + +from pip._internal.models.scheme import Scheme +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version + +logger = logging.getLogger(__name__) + + +def distutils_scheme( + dist_name: str, + user: bool = False, + home: str = None, + root: str = None, + isolated: bool = False, + prefix: str = None, + *, + ignore_config_files: bool = False, +) -> Dict[str, str]: + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name} + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + if not ignore_config_files: + try: + d.parse_config_files() + except UnicodeDecodeError: + # Typeshed does not include find_config_files() for some reason. + paths = d.find_config_files() # type: ignore + logger.warning( + "Ignore distutils configs in %s due to encoding errors.", + ", ".join(os.path.basename(p) for p in paths), + ) + obj: Optional[DistutilsCommand] = None + obj = d.get_command_obj("install", create=True) + assert obj is not None + i = cast(distutils_install_command, obj) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), f"user={user} prefix={prefix}" + assert not (home and prefix), f"home={home} prefix={prefix}" + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, "install_" + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if "install_lib" in d.get_option_dict("install"): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + if home: + prefix = home + elif user: + prefix = i.install_userbase # type: ignore + else: + prefix = i.prefix + scheme["headers"] = os.path.join( + prefix, + "include", + "site", + f"python{get_major_minor_version()}", + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join(root, path_no_drive[1:]) + + return scheme + + +def get_scheme( + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: + """ + Get the "scheme" corresponding to the input parameters. 
The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) + + +def get_bin_prefix() -> str: + # XXX: In old virtualenv versions, sys.prefix can contain '..' components, + # so we need to call normpath to eliminate them. + prefix = os.path.normpath(sys.prefix) + if WINDOWS: + bin_py = os.path.join(prefix, "Scripts") + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(prefix, "bin") + return bin_py + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return os.path.join(prefix, "bin") + + +def get_purelib() -> str: + return get_python_lib(plat_specific=False) + + +def get_platlib() -> str: + return get_python_lib(plat_specific=True) + + +def get_prefixed_libs(prefix: str) -> Tuple[str, str]: + return ( + get_python_lib(plat_specific=False, prefix=prefix), + get_python_lib(plat_specific=True, prefix=prefix), + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py b/myenv/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py new file mode 100644 index 000000000..5e141aa1b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py @@ -0,0 +1,219 @@ +import distutils.util # FIXME: For change_root. +import logging +import os +import sys +import sysconfig +import typing + +from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import get_major_minor_version, is_osx_framework + +logger = logging.getLogger(__name__) + + +# Notes on _infer_* functions. +# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no +# way to ask things like "what is the '_prefix' scheme on this platform". These +# functions try to answer that with some heuristics while accounting for ad-hoc +# platforms not covered by CPython's default sysconfig implementation. If the +# ad-hoc implementation does not fully implement sysconfig, we'll fall back to +# a POSIX scheme. + +_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) + +_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None) + + +def _should_use_osx_framework_prefix() -> bool: + """Check for Apple's ``osx_framework_library`` scheme. + + Python distributed by Apple's Command Line Tools has this special scheme + that's used when: + + * This is a framework build. 
+ * We are installing into the system prefix.
+
+ This does not account for ``pip install --prefix`` (also means we're not
+ installing to the system prefix), which should use ``posix_prefix``, but
+ logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
+ since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
+ which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
+ wouldn't be able to magically switch between ``osx_framework_library`` and
+ ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
+ means its behavior is consistent whether we use the stdlib implementation
+ or our own, and we deal with this special case in ``get_scheme()`` instead.
+ """
+ return (
+ "osx_framework_library" in _AVAILABLE_SCHEMES
+ and not running_under_virtualenv()
+ and is_osx_framework()
+ )
+
+
+def _infer_prefix() -> str:
+ """Try to find a prefix scheme for the current platform.
+
+ This tries:
+
+ * A special ``osx_framework_library`` for Python distributed by Apple's
+ Command Line Tools, when not running in a virtual environment.
+ * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+ * Implementation without OS, used by PyPy on POSIX (``pypy``).
+ * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+ * Just the OS name, used by CPython on Windows (``nt``).
+
+ If none of the above works, fall back to ``posix_prefix``.
+ """
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("prefix")
+ if _should_use_osx_framework_prefix():
+ return "osx_framework_library"
+ implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+ if implementation_suffixed in _AVAILABLE_SCHEMES:
+ return implementation_suffixed
+ if sys.implementation.name in _AVAILABLE_SCHEMES:
+ return sys.implementation.name
+ suffixed = f"{os.name}_prefix"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+ if os.name in _AVAILABLE_SCHEMES: # On Windows, prefix is just called "nt".
+ return os.name
+ return "posix_prefix"
+
+
+def _infer_user() -> str:
+ """Try to find a user scheme for the current platform."""
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("user")
+ if is_osx_framework() and not running_under_virtualenv():
+ suffixed = "osx_framework_user"
+ else:
+ suffixed = f"{os.name}_user"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+ if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable.
+ raise UserInstallationInvalid()
+ return "posix_user"
+
+
+def _infer_home() -> str:
+ """Try to find a home scheme for the current platform."""
+ if _PREFERRED_SCHEME_API:
+ return _PREFERRED_SCHEME_API("home")
+ suffixed = f"{os.name}_home"
+ if suffixed in _AVAILABLE_SCHEMES:
+ return suffixed
+ return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+ "installed_base",
+ "base",
+ "installed_platbase",
+ "platbase",
+ "prefix",
+ "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+ _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+ dist_name: str,
+ user: bool = False,
+ home: typing.Optional[str] = None,
+ root: typing.Optional[str] = None,
+ isolated: bool = False,
+ prefix: typing.Optional[str] = None,
+) -> Scheme:
+ """
+ Get the "scheme" corresponding to the input parameters.
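+
+ Example (an illustrative sketch; the exact paths vary by platform and
+ Python version)::
+
+ scheme = get_scheme("mypkg", user=True)
+ scheme.purelib # e.g. ~/.local/lib/python3.10/site-packages
+ scheme.scripts # e.g. ~/.local/bin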
+ + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme + :param root: root under which other directories are re-based + :param isolated: ignored, but kept for distutils compatibility (where + this controls whether the user-site pydistutils.cfg is honored) + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + if user and prefix: + raise InvalidSchemeCombination("--user", "--prefix") + if home and prefix: + raise InvalidSchemeCombination("--home", "--prefix") + + if home is not None: + scheme_name = _infer_home() + elif user: + scheme_name = _infer_user() + else: + scheme_name = _infer_prefix() + + # Special case: When installing into a custom prefix, use posix_prefix + # instead of osx_framework_library. See _should_use_osx_framework_prefix() + # docstring for details. + if prefix is not None and scheme_name == "osx_framework_library": + scheme_name = "posix_prefix" + + if home is not None: + variables = {k: home for k in _HOME_KEYS} + elif prefix is not None: + variables = {k: prefix for k in _HOME_KEYS} + else: + variables = {} + + paths = sysconfig.get_paths(scheme=scheme_name, vars=variables) + + # Logic here is very arbitrary, we're doing it for compatibility, don't ask. + # 1. Pip historically uses a special header path in virtual environments. + # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We + # only do the same when not running in a virtual environment because + # pip's historical header path logic (see point 1) did not do this. + if running_under_virtualenv(): + if user: + base = variables.get("userbase", sys.prefix) + else: + base = variables.get("base", sys.prefix) + python_xy = f"python{get_major_minor_version()}" + paths["include"] = os.path.join(base, "include", "site", python_xy) + elif not dist_name: + dist_name = "UNKNOWN" + + scheme = Scheme( + platlib=paths["platlib"], + purelib=paths["purelib"], + headers=os.path.join(paths["include"], dist_name), + scripts=paths["scripts"], + data=paths["data"], + ) + if root is not None: + for key in SCHEME_KEYS: + value = distutils.util.change_root(root, getattr(scheme, key)) + setattr(scheme, key, value) + return scheme + + +def get_bin_prefix() -> str: + # Forcing to use /usr/local/bin for standard macOS framework installs. 
+ if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": + return "/usr/local/bin" + return sysconfig.get_paths()["scripts"] + + +def get_purelib() -> str: + return sysconfig.get_paths()["purelib"] + + +def get_platlib() -> str: + return sysconfig.get_paths()["platlib"] + + +def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: + paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) + return (paths["purelib"], paths["platlib"]) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/locations/base.py b/myenv/lib/python3.10/site-packages/pip/_internal/locations/base.py new file mode 100644 index 000000000..86dad4a3a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/locations/base.py @@ -0,0 +1,52 @@ +import functools +import os +import site +import sys +import sysconfig +import typing + +from pip._internal.utils import appdirs +from pip._internal.utils.virtualenv import running_under_virtualenv + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + +# FIXME doesn't account for venv linked to global site-packages +site_packages: typing.Optional[str] = sysconfig.get_path("purelib") + + +def get_major_minor_version() -> str: + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". + """ + return "{}.{}".format(*sys.version_info) + + +def get_src_prefix() -> str: + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, "src") + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), "src") + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit("The folder you are executing pip from can no longer be found.") + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site: typing.Optional[str] = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE + + +@functools.lru_cache(maxsize=None) +def is_osx_framework() -> bool: + return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/main.py b/myenv/lib/python3.10/site-packages/pip/_internal/main.py new file mode 100644 index 000000000..33c6d24cd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/main.py @@ -0,0 +1,12 @@ +from typing import List, Optional + + +def main(args: Optional[List[str]] = None) -> int: + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
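+
+ Example (illustrative; new code should invoke the ``pip`` console
+ script or ``python -m pip`` instead)::
+
+ import sys
+ from pip._internal.main import main
+
+ sys.exit(main(["--version"]))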
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__init__.py new file mode 100644 index 000000000..cc037c14f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__init__.py @@ -0,0 +1,62 @@ +from typing import List, Optional + +from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel + +__all__ = [ + "BaseDistribution", + "BaseEnvironment", + "FilesystemWheel", + "MemoryWheel", + "Wheel", + "get_default_environment", + "get_environment", + "get_wheel_distribution", +] + + +def get_default_environment() -> BaseEnvironment: + """Get the default representation for the current environment. + + This returns an Environment instance from the chosen backend. The default + Environment instance should be built from ``sys.path`` and may use caching + to share instance state accorss calls. + """ + from .pkg_resources import Environment + + return Environment.default() + + +def get_environment(paths: Optional[List[str]]) -> BaseEnvironment: + """Get a representation of the environment specified by ``paths``. + + This returns an Environment instance from the chosen backend based on the + given import paths. The backend must build a fresh instance representing + the state of installed distributions when this function is called. + """ + from .pkg_resources import Environment + + return Environment.from_paths(paths) + + +def get_directory_distribution(directory: str) -> BaseDistribution: + """Get the distribution metadata representation in the specified directory. + + This returns a Distribution instance from the chosen backend based on + the given on-disk ``.dist-info`` directory. + """ + from .pkg_resources import Distribution + + return Distribution.from_directory(directory) + + +def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution: + """Get the representation of the specified wheel's distribution metadata. + + This returns a Distribution instance from the chosen backend based on + the given wheel's ``.dist-info`` directory. + + :param canonical_name: Normalized project name of the given wheel. 
+ """ + from .pkg_resources import Distribution + + return Distribution.from_wheel(wheel, canonical_name) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..af1cba081 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/base.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/base.cpython-310.pyc new file mode 100644 index 000000000..3efc61055 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/base.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc new file mode 100644 index 000000000..459b37050 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/base.py b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/base.py new file mode 100644 index 000000000..1a5a781cb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/base.py @@ -0,0 +1,546 @@ +import csv +import email.message +import json +import logging +import pathlib +import re +import zipfile +from typing import ( + IO, + TYPE_CHECKING, + Collection, + Container, + Iterable, + Iterator, + List, + Optional, + Tuple, + Union, +) + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet +from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.packaging.version import LegacyVersion, Version + +from pip._internal.exceptions import NoneMetadataError +from pip._internal.locations import site_packages, user_site +from pip._internal.models.direct_url import ( + DIRECT_URL_METADATA_NAME, + DirectUrl, + DirectUrlValidationError, +) +from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. +from pip._internal.utils.egg_link import ( + egg_link_path_from_location, + egg_link_path_from_sys_path, +) +from pip._internal.utils.misc import is_local, normalize_path +from pip._internal.utils.urls import url_to_path + +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + +DistributionVersion = Union[LegacyVersion, Version] + +InfoPath = Union[str, pathlib.PurePosixPath] + +logger = logging.getLogger(__name__) + + +class BaseEntryPoint(Protocol): + @property + def name(self) -> str: + raise NotImplementedError() + + @property + def value(self) -> str: + raise NotImplementedError() + + @property + def group(self) -> str: + raise NotImplementedError() + + +def _convert_installed_files_path( + entry: Tuple[str, ...], + info: Tuple[str, ...], +) -> str: + """Convert a legacy installed-files.txt path into modern RECORD path. + + The legacy format stores paths relative to the info directory, while the + modern format stores paths relative to the package root, e.g. the + site-packages directory. + + :param entry: Path parts of the installed-files.txt entry. 
+ :param info: Path parts of the egg-info directory relative to package root. + :returns: The converted entry. + + For best compatibility with symlinks, this does not use ``abspath()`` or + ``Path.resolve()``, but tries to work with path parts: + + 1. While ``entry`` starts with ``..``, remove the equal amounts of parts + from ``info``; if ``info`` is empty, start appending ``..`` instead. + 2. Join the two directly. + """ + while entry and entry[0] == "..": + if not info or info[-1] == "..": + info += ("..",) + else: + info = info[:-1] + entry = entry[1:] + return str(pathlib.Path(*info, *entry)) + + +class BaseDistribution(Protocol): + def __repr__(self) -> str: + return f"{self.raw_name} {self.version} ({self.location})" + + def __str__(self) -> str: + return f"{self.raw_name} {self.version}" + + @property + def location(self) -> Optional[str]: + """Where the distribution is loaded from. + + A string value is not necessarily a filesystem path, since distributions + can be loaded from other sources, e.g. arbitrary zip archives. ``None`` + means the distribution is created in-memory. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and files in the distribution. + """ + raise NotImplementedError() + + @property + def editable_project_location(self) -> Optional[str]: + """The project location for editable distributions. + + This is the directory where pyproject.toml or setup.py is located. + None if the distribution is not installed in editable mode. + """ + # TODO: this property is relatively costly to compute, memoize it ? + direct_url = self.direct_url + if direct_url: + if direct_url.is_local_editable(): + return url_to_path(direct_url.url) + else: + # Search for an .egg-link file by walking sys.path, as it was + # done before by dist_is_editable(). + egg_link_path = egg_link_path_from_sys_path(self.raw_name) + if egg_link_path: + # TODO: get project location from second line of egg_link file + # (https://github.com/pypa/pip/issues/10243) + return self.location + return None + + @property + def installed_location(self) -> Optional[str]: + """The distribution's "installed" location. + + This should generally be a ``site-packages`` directory. This is + usually ``dist.location``, except for legacy develop-installed packages, + where ``dist.location`` is the source code location, and this is where + the ``.egg-link`` file is. + + The returned location is normalized (in particular, with symlinks removed). + """ + egg_link = egg_link_path_from_location(self.raw_name) + if egg_link: + location = egg_link + elif self.location: + location = self.location + else: + return None + return normalize_path(location) + + @property + def info_location(self) -> Optional[str]: + """Location of the .[egg|dist]-info directory or file. + + Similarly to ``location``, a string value is not necessarily a + filesystem path. ``None`` means the distribution is created in-memory. + + For a modern .dist-info installation on disk, this should be something + like ``{location}/{raw_name}-{version}.dist-info``. + + Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If + this is a symbolic link, we want to preserve the relative path between + it and other files in the distribution. + """ + raise NotImplementedError() + + @property + def installed_by_distutils(self) -> bool: + """Whether this distribution is installed with legacy distutils format. 
+
+ A distribution installed with "raw" distutils not patched by setuptools
+ uses one single file at ``info_location`` to store metadata. We need to
+ treat this specially on uninstallation.
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ return pathlib.Path(info_location).is_file()
+
+ @property
+ def installed_as_egg(self) -> bool:
+ """Whether this distribution is installed as an egg.
+
+ This usually indicates the distribution was installed by (older versions
+ of) easy_install.
+ """
+ location = self.location
+ if not location:
+ return False
+ return location.endswith(".egg")
+
+ @property
+ def installed_with_setuptools_egg_info(self) -> bool:
+ """Whether this distribution is installed with the ``.egg-info`` format.
+
+ This usually indicates the distribution was installed with setuptools
+ with an old pip version or with ``single-version-externally-managed``.
+
+ Note that this ensures the metadata store is a directory. distutils can
+ also install an ``.egg-info``, but as a file, not a directory. This
+ property is *False* for that case. Also see ``installed_by_distutils``.
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ if not info_location.endswith(".egg-info"):
+ return False
+ return pathlib.Path(info_location).is_dir()
+
+ @property
+ def installed_with_dist_info(self) -> bool:
+ """Whether this distribution is installed with the "modern format".
+
+ This indicates a "modern" installation, e.g. storing metadata in the
+ ``.dist-info`` directory. This applies to installations made by
+ setuptools (but through pip, not directly), or anything using the
+ standardized build backend interface (PEP 517).
+ """
+ info_location = self.info_location
+ if not info_location:
+ return False
+ if not info_location.endswith(".dist-info"):
+ return False
+ return pathlib.Path(info_location).is_dir()
+
+ @property
+ def canonical_name(self) -> NormalizedName:
+ raise NotImplementedError()
+
+ @property
+ def version(self) -> DistributionVersion:
+ raise NotImplementedError()
+
+ @property
+ def setuptools_filename(self) -> str:
+ """Convert a project name to its setuptools-compatible filename.
+
+ This is a copy of ``pkg_resources.to_filename()`` for compatibility.
+ """
+ return self.raw_name.replace("-", "_")
+
+ @property
+ def direct_url(self) -> Optional[DirectUrl]:
+ """Obtain a DirectUrl from this distribution.
+
+ Returns None if the distribution has no `direct_url.json` metadata,
+ or if `direct_url.json` is invalid.
+ """
+ try:
+ content = self.read_text(DIRECT_URL_METADATA_NAME)
+ except FileNotFoundError:
+ return None
+ try:
+ return DirectUrl.from_json(content)
+ except (
+ UnicodeDecodeError,
+ json.JSONDecodeError,
+ DirectUrlValidationError,
+ ) as e:
+ logger.warning(
+ "Error parsing %s for %s: %s",
+ DIRECT_URL_METADATA_NAME,
+ self.canonical_name,
+ e,
+ )
+ return None
+
+ @property
+ def installer(self) -> str:
+ try:
+ installer_text = self.read_text("INSTALLER")
+ except (OSError, ValueError, NoneMetadataError):
+ return "" # Fail silently if the installer file cannot be read.
+ for line in installer_text.splitlines():
+ cleaned_line = line.strip()
+ if cleaned_line:
+ return cleaned_line
+ return ""
+
+ @property
+ def editable(self) -> bool:
+ return bool(self.editable_project_location)
+
+ @property
+ def local(self) -> bool:
+ """If distribution is installed in the current virtual environment.
+
+ Always True if we're not in a virtualenv.
+ """ + if self.installed_location is None: + return False + return is_local(self.installed_location) + + @property + def in_usersite(self) -> bool: + if self.installed_location is None or user_site is None: + return False + return self.installed_location.startswith(normalize_path(user_site)) + + @property + def in_site_packages(self) -> bool: + if self.installed_location is None or site_packages is None: + return False + return self.installed_location.startswith(normalize_path(site_packages)) + + def is_file(self, path: InfoPath) -> bool: + """Check whether an entry in the info directory is a file.""" + raise NotImplementedError() + + def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: + """Iterate through a directory in the info directory. + + Each item yielded would be a path relative to the info directory. + + :raise FileNotFoundError: If ``name`` does not exist in the directory. + :raise NotADirectoryError: If ``name`` does not point to a directory. + """ + raise NotImplementedError() + + def read_text(self, path: InfoPath) -> str: + """Read a file in the info directory. + + :raise FileNotFoundError: If ``name`` does not exist in the directory. + :raise NoneMetadataError: If ``name`` exists in the info directory, but + cannot be read. + """ + raise NotImplementedError() + + def iter_entry_points(self) -> Iterable[BaseEntryPoint]: + raise NotImplementedError() + + @property + def metadata(self) -> email.message.Message: + """Metadata of distribution parsed from e.g. METADATA or PKG-INFO. + + This should return an empty message if the metadata file is unavailable. + + :raises NoneMetadataError: If the metadata file is available, but does + not contain valid metadata. + """ + raise NotImplementedError() + + @property + def metadata_version(self) -> Optional[str]: + """Value of "Metadata-Version:" in distribution metadata, if available.""" + return self.metadata.get("Metadata-Version") + + @property + def raw_name(self) -> str: + """Value of "Name:" in distribution metadata.""" + # The metadata should NEVER be missing the Name: key, but if it somehow + # does, fall back to the known canonical name. + return self.metadata.get("Name", self.canonical_name) + + @property + def requires_python(self) -> SpecifierSet: + """Value of "Requires-Python:" in distribution metadata. + + If the key does not exist or contains an invalid value, an empty + SpecifierSet should be returned. + """ + value = self.metadata.get("Requires-Python") + if value is None: + return SpecifierSet() + try: + # Convert to str to satisfy the type checker; this can be a Header object. + spec = SpecifierSet(str(value)) + except InvalidSpecifier as e: + message = "Package %r has an invalid Requires-Python: %s" + logger.warning(message, self.raw_name, e) + return SpecifierSet() + return spec + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + """Dependencies of this distribution. + + For modern .dist-info distributions, this is the collection of + "Requires-Dist:" entries in distribution metadata. + """ + raise NotImplementedError() + + def iter_provided_extras(self) -> Iterable[str]: + """Extras provided by this distribution. + + For modern .dist-info distributions, this is the collection of + "Provides-Extra:" entries in distribution metadata. 
+ """ + raise NotImplementedError() + + def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]: + try: + text = self.read_text("RECORD") + except FileNotFoundError: + return None + # This extra Path-str cast normalizes entries. + return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines())) + + def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]: + try: + text = self.read_text("installed-files.txt") + except FileNotFoundError: + return None + paths = (p for p in text.splitlines(keepends=False) if p) + root = self.location + info = self.info_location + if root is None or info is None: + return paths + try: + info_rel = pathlib.Path(info).relative_to(root) + except ValueError: # info is not relative to root. + return paths + if not info_rel.parts: # info *is* root. + return paths + return ( + _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts) + for p in paths + ) + + def iter_declared_entries(self) -> Optional[Iterator[str]]: + """Iterate through file entires declared in this distribution. + + For modern .dist-info distributions, this is the files listed in the + ``RECORD`` metadata file. For legacy setuptools distributions, this + comes from ``installed-files.txt``, with entries normalized to be + compatible with the format used by ``RECORD``. + + :return: An iterator for listed entries, or None if the distribution + contains neither ``RECORD`` nor ``installed-files.txt``. + """ + return ( + self._iter_declared_entries_from_record() + or self._iter_declared_entries_from_legacy() + ) + + +class BaseEnvironment: + """An environment containing distributions to introspect.""" + + @classmethod + def default(cls) -> "BaseEnvironment": + raise NotImplementedError() + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment": + raise NotImplementedError() + + def get_distribution(self, name: str) -> Optional["BaseDistribution"]: + """Given a requirement name, return the installed distributions. + + The name may not be normalized. The implementation must canonicalize + it for lookup. + """ + raise NotImplementedError() + + def _iter_distributions(self) -> Iterator["BaseDistribution"]: + """Iterate through installed distributions. + + This function should be implemented by subclass, but never called + directly. Use the public ``iter_distribution()`` instead, which + implements additional logic to make sure the distributions are valid. + """ + raise NotImplementedError() + + def iter_distributions(self) -> Iterator["BaseDistribution"]: + """Iterate through installed distributions.""" + for dist in self._iter_distributions(): + # Make sure the distribution actually comes from a valid Python + # packaging distribution. Pip's AdjacentTempDirectory leaves folders + # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The + # valid project name pattern is taken from PEP 508. + project_name_valid = re.match( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + dist.canonical_name, + flags=re.IGNORECASE, + ) + if not project_name_valid: + logger.warning( + "Ignoring invalid distribution %s (%s)", + dist.canonical_name, + dist.location, + ) + continue + yield dist + + def iter_installed_distributions( + self, + local_only: bool = True, + skip: Container[str] = stdlib_pkgs, + include_editables: bool = True, + editables_only: bool = False, + user_only: bool = False, + ) -> Iterator[BaseDistribution]: + """Return a list of installed distributions. 
+ + :param local_only: If True (default), only return installations + local to the current virtualenv, if in a virtualenv. + :param skip: An iterable of canonicalized project names to ignore; + defaults to ``stdlib_pkgs``. + :param include_editables: If False, don't report editables. + :param editables_only: If True, only report editables. + :param user_only: If True, only report installations in the user + site directory. + """ + it = self.iter_distributions() + if local_only: + it = (d for d in it if d.local) + if not include_editables: + it = (d for d in it if not d.editable) + if editables_only: + it = (d for d in it if d.editable) + if user_only: + it = (d for d in it if d.in_usersite) + return (d for d in it if d.canonical_name not in skip) + + +class Wheel(Protocol): + location: str + + def as_zipfile(self) -> zipfile.ZipFile: + raise NotImplementedError() + + +class FilesystemWheel(Wheel): + def __init__(self, location: str) -> None: + self.location = location + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.location, allowZip64=True) + + +class MemoryWheel(Wheel): + def __init__(self, location: str, stream: IO[bytes]) -> None: + self.location = location + self.stream = stream + + def as_zipfile(self) -> zipfile.ZipFile: + return zipfile.ZipFile(self.stream, allowZip64=True) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/metadata/pkg_resources.py b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/pkg_resources.py new file mode 100644 index 000000000..d39f0ba31 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/metadata/pkg_resources.py @@ -0,0 +1,256 @@ +import email.message +import email.parser +import logging +import os +import pathlib +import zipfile +from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional + +from pip._vendor import pkg_resources +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel +from pip._internal.utils.misc import display_path +from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file + +from .base import ( + BaseDistribution, + BaseEntryPoint, + BaseEnvironment, + DistributionVersion, + InfoPath, + Wheel, +) + +logger = logging.getLogger(__name__) + + +class EntryPoint(NamedTuple): + name: str + value: str + group: str + + +class WheelMetadata: + """IMetadataProvider that reads metadata files from a dictionary. + + This also maps metadata decoding exceptions to our internal exception type. + """ + + def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None: + self._metadata = metadata + self._wheel_name = wheel_name + + def has_metadata(self, name: str) -> bool: + return name in self._metadata + + def get_metadata(self, name: str) -> str: + try: + return self._metadata[name].decode() + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. 
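+ # For instance, an undecodable METADATA file surfaces here as
+ # something like "Error decoding metadata for ./foo.whl: ... in
+ # METADATA file" rather than as a bare UnicodeDecodeError.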
+ raise UnsupportedWheel(
+ f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
+ )
+
+ def get_metadata_lines(self, name: str) -> Iterable[str]:
+ return pkg_resources.yield_lines(self.get_metadata(name))
+
+ def metadata_isdir(self, name: str) -> bool:
+ return False
+
+ def metadata_listdir(self, name: str) -> List[str]:
+ return []
+
+ def run_script(self, script_name: str, namespace: str) -> None:
+ pass
+
+
+class Distribution(BaseDistribution):
+ def __init__(self, dist: pkg_resources.Distribution) -> None:
+ self._dist = dist
+
+ @classmethod
+ def from_directory(cls, directory: str) -> "Distribution":
+ dist_dir = directory.rstrip(os.sep)
+
+ # Build a PathMetadata object, from path to metadata. :wink:
+ base_dir, dist_dir_name = os.path.split(dist_dir)
+ metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
+
+ # Determine the correct Distribution object type.
+ if dist_dir.endswith(".egg-info"):
+ dist_cls = pkg_resources.Distribution
+ dist_name = os.path.splitext(dist_dir_name)[0]
+ else:
+ assert dist_dir.endswith(".dist-info")
+ dist_cls = pkg_resources.DistInfoDistribution
+ dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
+
+ dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
+ return cls(dist)
+
+ @classmethod
+ def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution":
+ """Load the distribution from a given wheel.
+
+ :raises InvalidWheel: Whenever loading of the wheel causes a
+ :py:exc:`zipfile.BadZipFile` exception to be thrown.
+ :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
+ internally.
+ """
+ try:
+ with wheel.as_zipfile() as zf:
+ info_dir, _ = parse_wheel(zf, name)
+ metadata_text = {
+ path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
+ for path in zf.namelist()
+ if path.startswith(f"{info_dir}/")
+ }
+ except zipfile.BadZipFile as e:
+ raise InvalidWheel(wheel.location, name) from e
+ except UnsupportedWheel as e:
+ raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+ dist = pkg_resources.DistInfoDistribution(
+ location=wheel.location,
+ metadata=WheelMetadata(metadata_text, wheel.location),
+ project_name=name,
+ )
+ return cls(dist)
+
+ @property
+ def location(self) -> Optional[str]:
+ return self._dist.location
+
+ @property
+ def info_location(self) -> Optional[str]:
+ return self._dist.egg_info
+
+ @property
+ def installed_by_distutils(self) -> bool:
+ # A distutils-installed distribution is provided by FileMetadata. This
+ # provider has a "path" attribute not present anywhere else. Not the
+ # best introspection logic, but pip has been doing this for a long time.
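+ # For illustration: a distutils install is backed by
+ # pkg_resources.FileMetadata, whose ``path`` attribute points at the
+ # egg-info file, so the probe below succeeds only for such installs.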
+ try:
+ return bool(self._dist._provider.path)
+ except AttributeError:
+ return False
+
+ @property
+ def canonical_name(self) -> NormalizedName:
+ return canonicalize_name(self._dist.project_name)
+
+ @property
+ def version(self) -> DistributionVersion:
+ return parse_version(self._dist.version)
+
+ def is_file(self, path: InfoPath) -> bool:
+ return self._dist.has_metadata(str(path))
+
+ def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
+ name = str(path)
+ if not self._dist.has_metadata(name):
+ raise FileNotFoundError(name)
+ if not self._dist.isdir(name):
+ raise NotADirectoryError(name)
+ for child in self._dist.metadata_listdir(name):
+ yield pathlib.PurePosixPath(path, child)
+
+ def read_text(self, path: InfoPath) -> str:
+ name = str(path)
+ if not self._dist.has_metadata(name):
+ raise FileNotFoundError(name)
+ content = self._dist.get_metadata(name)
+ if content is None:
+ raise NoneMetadataError(self, name)
+ return content
+
+ def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+ for group, entries in self._dist.get_entry_map().items():
+ for name, entry_point in entries.items():
+ name, _, value = str(entry_point).partition("=")
+ yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
+
+ @property
+ def metadata(self) -> email.message.Message:
+ """
+ :raises NoneMetadataError: if the distribution reports `has_metadata()`
+ True but `get_metadata()` returns None.
+ """
+ if isinstance(self._dist, pkg_resources.DistInfoDistribution):
+ metadata_name = "METADATA"
+ else:
+ metadata_name = "PKG-INFO"
+ try:
+ metadata = self.read_text(metadata_name)
+ except FileNotFoundError:
+ if self.location:
+ displaying_path = display_path(self.location)
+ else:
+ displaying_path = repr(self.location)
+ logger.warning("No metadata found in %s", displaying_path)
+ metadata = ""
+ feed_parser = email.parser.FeedParser()
+ feed_parser.feed(metadata)
+ return feed_parser.close()
+
+ def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
+ if extras: # pkg_resources raises on invalid extras, so we sanitize.
+ extras = frozenset(extras).intersection(self._dist.extras)
+ return self._dist.requires(extras)
+
+ def iter_provided_extras(self) -> Iterable[str]:
+ return self._dist.extras
+
+
+class Environment(BaseEnvironment):
+ def __init__(self, ws: pkg_resources.WorkingSet) -> None:
+ self._ws = ws
+
+ @classmethod
+ def default(cls) -> BaseEnvironment:
+ return cls(pkg_resources.working_set)
+
+ @classmethod
+ def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
+ return cls(pkg_resources.WorkingSet(paths))
+
+ def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
+ """Find a distribution matching the ``name`` in the environment.
+
+ This searches through *all* distributions available in the environment, to
+ match the behavior of ``pkg_resources.get_distribution()``.
+ """
+ canonical_name = canonicalize_name(name)
+ for dist in self.iter_distributions():
+ if dist.canonical_name == canonical_name:
+ return dist
+ return None
+
+ def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+ # Search the distribution by looking through the working set.
+ dist = self._search_distribution(name)
+ if dist:
+ return dist
+
+ # If the distribution could not be found, call working_set.require to
+ # update the working set, and try to find the distribution again.
+ # This might happen e.g. when you install a package twice, once
+ # using setup.py develop and again using setup.py install. Now when
+ # running pip uninstall twice, the package gets removed from the
+ # working set in the first uninstall, so we have to populate the
+ # working set again so that pip knows about it and the package gets
+ # picked up and is successfully uninstalled the second time too.
+ try:
+ # We didn't pass in any version specifiers, so this can never
+ # raise pkg_resources.VersionConflict.
+ self._ws.require(name)
+ except pkg_resources.DistributionNotFound:
+ return None
+ return self._search_distribution(name)
+
+ def _iter_distributions(self) -> Iterator[BaseDistribution]:
+ for dist in self._ws:
+ yield Distribution(dist)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 000000000..7855226e4
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..668549d35
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc
new file mode 100644
index 000000000..1e26b0609
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc
new file mode 100644
index 000000000..1ab8c9197
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc
new file mode 100644
index 000000000..bb1126a4d
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc
new file mode 100644
index 000000000..3179e2d84
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc
new file mode 100644
index 000000000..ecb77b9ad
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc
new file mode 100644
index 000000000..da3d8a3bf
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc
new file mode 100644
index 000000000..825ea143e
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc
new file mode 100644
index 000000000..2a3a74b3d
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc
new file mode 100644
index 000000000..7e3ede27b
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 000000000..3d9955498
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/candidate.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 000000000..a4963aec6
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,34 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.models.link import Link
+from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+ """Represents a potential "candidate" for installation."""
+
+ __slots__ = ["name", "version", "link"]
+
+ def __init__(self, name: str, version: str, link: Link) -> None:
+ self.name = name
+ self.version = parse_version(version)
+ self.link = link
+
+ super().__init__(
+ key=(self.name, self.version, self.link),
+ defining_class=InstallationCandidate,
+ )
+
+ def __repr__(self) -> str:
+ return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+ self.name,
+ self.version,
+ self.link,
+ )
+
+ def __str__(self) -> str:
+ return "{!r} candidate (version {} at {})".format(
+ self.name,
+ self.version,
+ self.link,
+ )
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/direct_url.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 000000000..92060d45d
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,220 @@
+""" PEP 610 """
+import json
+import re
+import urllib.parse
+from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+ "DirectUrl",
+ "DirectUrlValidationError",
+ "DirInfo",
+ "ArchiveInfo",
+ "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+ pass
+
+
+def _get(
+ d: Dict[str,
Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> Optional[T]: + """Get value from dictionary and verify expected type.""" + if key not in d: + return default + value = d[key] + if not isinstance(value, expected_type): + raise DirectUrlValidationError( + "{!r} has unexpected type for {} (expected {})".format( + value, key, expected_type + ) + ) + return value + + +def _get_required( + d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> T: + value = _get(d, expected_type, key, default) + if value is None: + raise DirectUrlValidationError(f"{key} must have a value") + return value + + +def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": + infos = [info for info in infos if info is not None] + if not infos: + raise DirectUrlValidationError( + "missing one of archive_info, dir_info, vcs_info" + ) + if len(infos) > 1: + raise DirectUrlValidationError( + "more than one of archive_info, dir_info, vcs_info" + ) + assert infos[0] is not None + return infos[0] + + +def _filter_none(**kwargs: Any) -> Dict[str, Any]: + """Make dict excluding None values.""" + return {k: v for k, v in kwargs.items() if v is not None} + + +class VcsInfo: + name = "vcs_info" + + def __init__( + self, + vcs: str, + commit_id: str, + requested_revision: Optional[str] = None, + resolved_revision: Optional[str] = None, + resolved_revision_type: Optional[str] = None, + ) -> None: + self.vcs = vcs + self.requested_revision = requested_revision + self.commit_id = commit_id + self.resolved_revision = resolved_revision + self.resolved_revision_type = resolved_revision_type + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: + if d is None: + return None + return cls( + vcs=_get_required(d, str, "vcs"), + commit_id=_get_required(d, str, "commit_id"), + requested_revision=_get(d, str, "requested_revision"), + resolved_revision=_get(d, str, "resolved_revision"), + resolved_revision_type=_get(d, str, "resolved_revision_type"), + ) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none( + vcs=self.vcs, + requested_revision=self.requested_revision, + commit_id=self.commit_id, + resolved_revision=self.resolved_revision, + resolved_revision_type=self.resolved_revision_type, + ) + + +class ArchiveInfo: + name = "archive_info" + + def __init__( + self, + hash: Optional[str] = None, + ) -> None: + self.hash = hash + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: + if d is None: + return None + return cls(hash=_get(d, str, "hash")) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(hash=self.hash) + + +class DirInfo: + name = "dir_info" + + def __init__( + self, + editable: bool = False, + ) -> None: + self.editable = editable + + @classmethod + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: + if d is None: + return None + return cls(editable=_get_required(d, bool, "editable", default=False)) + + def _to_dict(self) -> Dict[str, Any]: + return _filter_none(editable=self.editable or None) + + +InfoType = Union[ArchiveInfo, DirInfo, VcsInfo] + + +class DirectUrl: + def __init__( + self, + url: str, + info: InfoType, + subdirectory: Optional[str] = None, + ) -> None: + self.url = url + self.info = info + self.subdirectory = subdirectory + + def _remove_auth_from_netloc(self, netloc: str) -> str: + if "@" not in netloc: + return netloc + user_pass, netloc_no_user_pass = netloc.split("@", 1) + if ( + isinstance(self.info, VcsInfo) + 
and self.info.vcs == "git" + and user_pass == "git" + ): + return netloc + if ENV_VAR_RE.match(user_pass): + return netloc + return netloc_no_user_pass + + @property + def redacted_url(self) -> str: + """url with user:password part removed unless it is formed with + environment variables as specified in PEP 610, or it is ``git`` + in the case of a git URL. + """ + purl = urllib.parse.urlsplit(self.url) + netloc = self._remove_auth_from_netloc(purl.netloc) + surl = urllib.parse.urlunsplit( + (purl.scheme, netloc, purl.path, purl.query, purl.fragment) + ) + return surl + + def validate(self) -> None: + self.from_dict(self.to_dict()) + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl": + return DirectUrl( + url=_get_required(d, str, "url"), + subdirectory=_get(d, str, "subdirectory"), + info=_exactly_one_of( + [ + ArchiveInfo._from_dict(_get(d, dict, "archive_info")), + DirInfo._from_dict(_get(d, dict, "dir_info")), + VcsInfo._from_dict(_get(d, dict, "vcs_info")), + ] + ), + ) + + def to_dict(self) -> Dict[str, Any]: + res = _filter_none( + url=self.redacted_url, + subdirectory=self.subdirectory, + ) + res[self.info.name] = self.info._to_dict() + return res + + @classmethod + def from_json(cls, s: str) -> "DirectUrl": + return cls.from_dict(json.loads(s)) + + def to_json(self) -> str: + return json.dumps(self.to_dict(), sort_keys=True) + + def is_local_editable(self) -> bool: + return isinstance(self.info, DirInfo) and self.info.editable diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/format_control.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 000000000..db3995eac --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,80 @@ +from typing import FrozenSet, Optional, Set + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError + + +class FormatControl: + """Helper for managing formats from which a package can be installed.""" + + __slots__ = ["no_binary", "only_binary"] + + def __init__( + self, + no_binary: Optional[Set[str]] = None, + only_binary: Optional[Set[str]] = None, + ) -> None: + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + + if self.__slots__ != other.__slots__: + return False + + return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) + + def __repr__(self) -> str: + return "{}({}, {})".format( + self.__class__.__name__, self.no_binary, self.only_binary + ) + + @staticmethod + def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: + if value.startswith("-"): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." 
+ )
+ new = value.split(",")
+ while ":all:" in new:
+ other.clear()
+ target.clear()
+ target.add(":all:")
+ del new[: new.index(":all:") + 1]
+ # Without a none, we want to discard everything as :all: covers it
+ if ":none:" not in new:
+ return
+ for name in new:
+ if name == ":none:":
+ target.clear()
+ continue
+ name = canonicalize_name(name)
+ other.discard(name)
+ target.add(name)
+
+ def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+ result = {"binary", "source"}
+ if canonical_name in self.only_binary:
+ result.discard("source")
+ elif canonical_name in self.no_binary:
+ result.discard("binary")
+ elif ":all:" in self.only_binary:
+ result.discard("source")
+ elif ":all:" in self.no_binary:
+ result.discard("binary")
+ return frozenset(result)
+
+ def disallow_binaries(self) -> None:
+ self.handle_mutual_excludes(
+ ":all:",
+ self.no_binary,
+ self.only_binary,
+ )
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/index.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/index.py
new file mode 100644
index 000000000..b94c32511
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,28 @@
+import urllib.parse
+
+
+class PackageIndex:
+ """Represents a Package Index and provides easier access to endpoints"""
+
+ __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+ def __init__(self, url: str, file_storage_domain: str) -> None:
+ super().__init__()
+ self.url = url
+ self.netloc = urllib.parse.urlsplit(url).netloc
+ self.simple_url = self._url_for_path("simple")
+ self.pypi_url = self._url_for_path("pypi")
+
+ # This is part of a temporary hack used to block installs of PyPI
+ # packages which depend on external urls; only necessary until PyPI
+ # can block such packages themselves.
+ self.file_storage_domain = file_storage_domain
+
+ def _url_for_path(self, path: str) -> str:
+ return urllib.parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+TestPyPI = PackageIndex(
+ "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/link.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/link.py
new file mode 100644
index 000000000..6069b278b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,288 @@
+import functools
+import logging
+import os
+import posixpath
+import re
+import urllib.parse
+from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
+
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+ redact_auth_from_url,
+ split_auth_from_netloc,
+ splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if TYPE_CHECKING:
+ from pip._internal.index.collector import HTMLPage
+
+logger = logging.getLogger(__name__)
+
+
+_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
+
+
+class Link(KeyBasedCompareMixin):
+ """Represents a parsed link from a Package Index's simple URL"""
+
+ __slots__ = [
+ "_parsed_url",
+ "_url",
+ "comes_from",
+ "requires_python",
+ "yanked_reason",
+ "cache_link_parsing",
+ ]
+
+ def __init__(
+ self,
+ url: str,
+ comes_from: Optional[Union[str, "HTMLPage"]] = None,
+ requires_python: Optional[str] = None,
+ yanked_reason: Optional[str] = None,
+ cache_link_parsing: bool = True,
+ ) -> None:
+ """
+ :param url: url of the resource pointed to (href of the link)
+ :param comes_from: instance of HTMLPage where the link was found,
+ or string.
+ :param requires_python: String containing the `Requires-Python`
+ metadata field, specified in PEP 345. This may be specified by
+ a data-requires-python attribute in the HTML link tag, as
+ described in PEP 503.
+ :param yanked_reason: the reason the file has been yanked, if the
+ file has been yanked, or None if the file hasn't been yanked.
+ This is the value of the "data-yanked" attribute, if present, in
+ a simple repository HTML link. If the file has been yanked but
+ no reason was provided, this should be the empty string. See
+ PEP 592 for more information and the specification.
+ :param cache_link_parsing: A flag that is used elsewhere to determine
+ whether resources retrieved from this link
+ should be cached. PyPI index urls should
+ generally have this set to False, for
+ example.
+ """
+
+ # url can be a UNC windows share
+ if url.startswith("\\\\"):
+ url = path_to_url(url)
+
+ self._parsed_url = urllib.parse.urlsplit(url)
+ # Store the url as a private attribute to prevent accidentally
+ # trying to set a new value.
+ self._url = url
+
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+ self.yanked_reason = yanked_reason
+
+ super().__init__(key=url, defining_class=Link)
+
+ self.cache_link_parsing = cache_link_parsing
+
+ def __str__(self) -> str:
+ if self.requires_python:
+ rp = f" (requires-python:{self.requires_python})"
+ else:
+ rp = ""
+ if self.comes_from:
+ return "{} (from {}){}".format(
+ redact_auth_from_url(self._url), self.comes_from, rp
+ )
+ else:
+ return redact_auth_from_url(str(self._url))
+
+ def __repr__(self) -> str:
+ return f"<Link {self} (from {self.comes_from})>"
+
+ @property
+ def url(self) -> str:
+ return self._url
+
+ @property
+ def filename(self) -> str:
+ path = self.path.rstrip("/")
+ name = posixpath.basename(path)
+ if not name:
+ # Make sure we don't leak auth information if the netloc
+ # includes a username and password.
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
+ return netloc
+
+ name = urllib.parse.unquote(name)
+ assert name, f"URL {self._url!r} produced no filename"
+ return name
+
+ @property
+ def file_path(self) -> str:
+ return url_to_path(self.url)
+
+ @property
+ def scheme(self) -> str:
+ return self._parsed_url.scheme
+
+ @property
+ def netloc(self) -> str:
+ """
+ This can contain auth information.
+ """ + return self._parsed_url.netloc + + @property + def path(self) -> str: + return urllib.parse.unquote(self._parsed_url.path) + + def splitext(self) -> Tuple[str, str]: + return splitext(posixpath.basename(self.path.rstrip("/"))) + + @property + def ext(self) -> str: + return self.splitext()[1] + + @property + def url_without_fragment(self) -> str: + scheme, netloc, path, query, fragment = self._parsed_url + return urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + + _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") + + @property + def egg_fragment(self) -> Optional[str]: + match = self._egg_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") + + @property + def subdirectory_fragment(self) -> Optional[str]: + match = self._subdirectory_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES)) + ) + + @property + def hash(self) -> Optional[str]: + match = self._hash_re.search(self._url) + if match: + return match.group(2) + return None + + @property + def hash_name(self) -> Optional[str]: + match = self._hash_re.search(self._url) + if match: + return match.group(1) + return None + + @property + def show_url(self) -> str: + return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0]) + + @property + def is_file(self) -> bool: + return self.scheme == "file" + + def is_existing_dir(self) -> bool: + return self.is_file and os.path.isdir(self.file_path) + + @property + def is_wheel(self) -> bool: + return self.ext == WHEEL_EXTENSION + + @property + def is_vcs(self) -> bool: + from pip._internal.vcs import vcs + + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self) -> bool: + return self.yanked_reason is not None + + @property + def has_hash(self) -> bool: + return self.hash_name is not None + + def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: + return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. + assert self.hash_name is not None + assert self.hash is not None + + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) + + +class _CleanResult(NamedTuple): + """Convert link for equivalency check. + + This is used in the resolver to check whether two URL-specified requirements + likely point to the same distribution and can be considered equivalent. This + equivalency logic avoids comparing URLs literally, which can be too strict + (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users. + + Currently this does three things: + + 1. Drop the basic auth part. This is technically wrong since a server can + serve different content based on auth, but if it does that, it is even + impossible to guarantee two URLs without auth are equivalent, since + the user can input different auth information when prompted. So the + practical solution is to assume the auth doesn't affect the response. + 2. Parse the query to avoid the ordering issue. Note that ordering under the + same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are + still considered different. + 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and + hash values, since it should have no impact the downloaded content. 
Note + that this drops the "egg=" part historically used to denote the requested + project (and extras), which is wrong in the strictest sense, but too many + people are supplying it inconsistently to cause superfluous resolution + conflicts, so we choose to also ignore them. + """ + + parsed: urllib.parse.SplitResult + query: Dict[str, List[str]] + subdirectory: str + hashes: Dict[str, str] + + +def _clean_link(link: Link) -> _CleanResult: + parsed = link._parsed_url + netloc = parsed.netloc.rsplit("@", 1)[-1] + # According to RFC 8089, an empty host in file: means localhost. + if parsed.scheme == "file" and not netloc: + netloc = "localhost" + fragment = urllib.parse.parse_qs(parsed.fragment) + if "egg" in fragment: + logger.debug("Ignoring egg= fragment in %s", link) + try: + # If there are multiple subdirectory values, use the first one. + # This matches the behavior of Link.subdirectory_fragment. + subdirectory = fragment["subdirectory"][0] + except (IndexError, KeyError): + subdirectory = "" + # If there are multiple hash values under the same algorithm, use the + # first one. This matches the behavior of Link.hash_value. + hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment} + return _CleanResult( + parsed=parsed._replace(netloc=netloc, query="", fragment=""), + query=urllib.parse.parse_qs(parsed.query), + subdirectory=subdirectory, + hashes=hashes, + ) + + +@functools.lru_cache(maxsize=None) +def links_equivalent(link1: Link, link2: Link) -> bool: + return _clean_link(link1) == _clean_link(link2) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/scheme.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 000000000..f51190ac6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,31 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"] + + +class Scheme: + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. + """ + + __slots__ = SCHEME_KEYS + + def __init__( + self, + platlib: str, + purelib: str, + headers: str, + scripts: str, + data: str, + ) -> None: + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/search_scope.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 000000000..e4e54c2f4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,129 @@ +import itertools +import logging +import os +import posixpath +import urllib.parse +from typing import List + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url + +logger = logging.getLogger(__name__) + + +class SearchScope: + + """ + Encapsulates the locations that pip is configured to search. + """ + + __slots__ = ["find_links", "index_urls"] + + @classmethod + def create( + cls, + find_links: List[str], + index_urls: List[str], + ) -> "SearchScope": + """ + Create a SearchScope object after normalizing the `find_links`. 
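+
+ For example (illustrative), ``SearchScope.create(find_links=["~/wheels"],
+ index_urls=["https://pypi.org/simple"])`` expands ``~/wheels`` to an
+ absolute path when that directory exists on disk.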
+ """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links: List[str] = [] + for link in find_links: + if link.startswith("~"): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib.parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links: List[str], + index_urls: List[str], + ) -> None: + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self) -> str: + lines = [] + redacted_index_urls = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + for url in self.index_urls: + + redacted_index_url = redact_auth_from_url(url) + + # Parse the URL + purl = urllib.parse.urlsplit(redacted_index_url) + + # URL is generally invalid if scheme and netloc is missing + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not purl.scheme and not purl.netloc: + logger.warning( + 'The index url "%s" seems invalid, please provide a scheme.', + redacted_index_url, + ) + + redacted_index_urls.append(redacted_index_url) + + lines.append( + "Looking in indexes: {}".format(", ".join(redacted_index_urls)) + ) + + if self.find_links: + lines.append( + "Looking in links: {}".format( + ", ".join(redact_auth_from_url(url) for url in self.find_links) + ) + ) + return "\n".join(lines) + + def get_index_urls_locations(self, project_name: str) -> List[str]: + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url: str) -> str: + loc = posixpath.join( + url, urllib.parse.quote(canonicalize_name(project_name)) + ) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith("/"): + loc = loc + "/" + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 000000000..977bc4caa --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,51 @@ +from typing import Optional + +from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences: + """ + Encapsulates the candidate selection preferences for downloading + and installing files. 
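+
+ For example (illustrative), ``SelectionPreferences(allow_yanked=False,
+ prefer_binary=True)`` describes a selection that refuses yanked files
+ and favors wheels over source distributions.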
+ """ + + __slots__ = [ + "allow_yanked", + "allow_all_prereleases", + "format_control", + "prefer_binary", + "ignore_requires_python", + ] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked: bool, + allow_all_prereleases: bool = False, + format_control: Optional[FormatControl] = None, + prefer_binary: bool = False, + ignore_requires_python: Optional[bool] = None, + ) -> None: + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/target_python.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 000000000..744bd7ef5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,110 @@ +import sys +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import Tag + +from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info + + +class TargetPython: + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + __slots__ = [ + "_given_py_version_info", + "abis", + "implementation", + "platforms", + "py_version", + "py_version_info", + "_valid_tags", + ] + + def __init__( + self, + platforms: Optional[List[str]] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + abis: Optional[List[str]] = None, + implementation: Optional[str] = None, + ) -> None: + """ + :param platforms: A list of strings or None. If None, searches for + packages that are supported by the current system. Otherwise, will + find packages that can be built on the platforms passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abis: A list of strings or None. This is passed to + compatibility_tags.py's get_supported() function as is. + :param implementation: A string or None. This is passed to + compatibility_tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). 
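+ # (Illustrative) py_version_info=(3, 10) is normalized below to
+ # (3, 10, 0), producing py_version "3.10".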
+ self._given_py_version_info = py_version_info
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ py_version = ".".join(map(str, py_version_info[:2]))
+
+ self.abis = abis
+ self.implementation = implementation
+ self.platforms = platforms
+ self.py_version = py_version
+ self.py_version_info = py_version_info
+
+ # This is used to cache the return value of get_tags().
+ self._valid_tags: Optional[List[Tag]] = None
+
+ def format_given(self) -> str:
+ """
+ Format the given, non-None attributes for display.
+ """
+ display_version = None
+ if self._given_py_version_info is not None:
+ display_version = ".".join(
+ str(part) for part in self._given_py_version_info
+ )
+
+ key_values = [
+ ("platforms", self.platforms),
+ ("version_info", display_version),
+ ("abis", self.abis),
+ ("implementation", self.implementation),
+ ]
+ return " ".join(
+ f"{key}={value!r}" for key, value in key_values if value is not None
+ )
+
+ def get_tags(self) -> List[Tag]:
+ """
+ Return the supported PEP 425 tags to check wheel candidates against.
+
+ The tags are returned in order of preference (most preferred first).
+ """
+ if self._valid_tags is None:
+ # Pass versions=None if no py_version_info was given since
+ # versions=None uses special default logic.
+ py_version_info = self._given_py_version_info
+ if py_version_info is None:
+ version = None
+ else:
+ version = version_info_to_nodot(py_version_info)
+
+ tags = get_supported(
+ version=version,
+ platforms=self.platforms,
+ abis=self.abis,
+ impl=self.implementation,
+ )
+ self._valid_tags = tags
+
+ return self._valid_tags
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/models/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/models/wheel.py
new file mode 100644
index 000000000..aaf218d1a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/models/wheel.py
@@ -0,0 +1,89 @@
+"""Represents a wheel file and provides access to the various parts of the
+name that have meaning.
+"""
+import re
+from typing import Dict, Iterable, List
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.exceptions import InvalidWheelFilename
+
+
+class Wheel:
+ """A wheel file"""
+
+ wheel_file_re = re.compile(
+ r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+ ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
+ \.whl|\.dist-info)$""", + re.VERBOSE, + ) + + def __init__(self, filename: str) -> None: + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.") + self.filename = filename + self.name = wheel_info.group("name").replace("_", "-") + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group("ver").replace("_", "-") + self.build_tag = wheel_info.group("build") + self.pyversions = wheel_info.group("pyver").split(".") + self.abis = wheel_info.group("abi").split(".") + self.plats = wheel_info.group("plat").split(".") + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self) -> List[str]: + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags: List[Tag]) -> int: + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def find_most_preferred_tag( + self, tags: List[Tag], tag_to_priority: Dict[Tag, int] + ) -> int: + """Return the priority of the most preferred tag that one of the wheel's file + tag combinations achieves in the given list of supported tags using the given + tag_to_priority mapping, where lower priorities are more-preferred. + + This is used in place of support_index_min in some cases in order to avoid + an expensive linear scan of a large list of tags. + + :param tags: the PEP 425 tags to check the wheel against. + :param tag_to_priority: a mapping from tag to priority of that tag, where + lower is more preferred. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min( + tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority + ) + + def supported(self, tags: Iterable[Tag]) -> bool: + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. + """ + return not self.file_tags.isdisjoint(tags) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 000000000..b51bde91b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. 
+""" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..b802d567d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc new file mode 100644 index 000000000..c00cb9da0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc new file mode 100644 index 000000000..3db5f006e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc new file mode 100644 index 000000000..ea4611400 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc new file mode 100644 index 000000000..3b1ff49cc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc new file mode 100644 index 000000000..46f5775b4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc new file mode 100644 index 000000000..4f07bdc4a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc new file mode 100644 index 000000000..7056585e1 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/auth.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/auth.py new file mode 100644 index 000000000..ca42798bd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,323 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. 
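+
+ For example (illustrative), a requests session can be wired up as::
+
+ session.auth = MultiDomainBasicAuth(index_urls=["https://pypi.org/simple"])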
+""" + +import urllib.parse +from typing import Any, Dict, List, Optional, Tuple + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import Request, Response +from pip._vendor.requests.utils import get_netrc_auth + +from pip._internal.utils.logging import getLogger +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.vcs.versioncontrol import AuthInfo + +logger = getLogger(__name__) + +Credentials = Tuple[str, str, str] + +try: + import keyring +except ImportError: + keyring = None # type: ignore[assignment] +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + keyring = None # type: ignore[assignment] + + +def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: + """Return the tuple auth for a given url from keyring.""" + global keyring + if not url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + keyring = None # type: ignore[assignment] + return None + + +class MultiDomainBasicAuth(AuthBase): + def __init__( + self, prompting: bool = True, index_urls: Optional[List[str]] = None + ) -> None: + self.prompting = prompting + self.index_urls = index_urls + self.passwords: Dict[str, AuthInfo] = {} + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save: Optional[Credentials] = None + + def _get_index_url(self, url: str) -> Optional[str]: + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. + """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + return None + + def _get_new_credentials( + self, + original_url: str, + allow_netrc: bool = True, + allow_keyring: bool = False, + ) -> AuthInfo: + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. 
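+ # (Illustrative) "https://user:pass@example.com/simple" splits into
+ # ("https://example.com/simple", "example.com", ("user", "pass")).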
+ url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + # fmt: off + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + # fmt: on + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials( + self, original_url: str + ) -> Tuple[str, Optional[str], Optional[str]]: + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Try to get credentials from original url + username, password = self._get_new_credentials(original_url) + + # If credentials not found, use any stored credentials for this netloc. + # Do this if either the username or the password is missing. + # This accounts for the situation in which the user has specified + # the username in the index url, but the password comes from keyring. + if (username is None or password is None) and netloc in self.passwords: + un, pw = self.passwords[netloc] + # It is possible that the cached credentials are for a different username, + # in which case the cache should be ignored. + if username is None or username == un: + username, password = un, pw + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) + # Credentials were not found + or (username is None and password is None) + ), f"Could not load credentials from url: {original_url}" + + return url, username, password + + def __call__(self, req: Request) -> Request: + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password( + self, netloc: str + ) -> Tuple[Optional[str], Optional[str], bool]: + username = ask_input(f"User for {netloc}: ") + if not username: + return None, None, False + auth = get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self) -> bool: + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp: Response, **kwargs: Any) -> Response: + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib.parse.urlparse(resp.url) + + # Query the keyring for credentials: + username, password = self._get_new_credentials( + resp.url, + allow_netrc=False, + allow_keyring=True, + ) + + # Prompt the user for a new username and password + save = False + if not username and not password: + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
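
`handle_401()` above leans on two public `requests` extension points: `AuthBase.__call__` decorates every outgoing request, and a per-request response hook intercepts the reply and may substitute a new `Response`. Stripped of pip's prompting and keyring logic, the retry-on-401 skeleton looks like this (written against the public `requests` distribution rather than pip's vendored copy):

```python
from typing import Any

from requests.auth import AuthBase, HTTPBasicAuth
from requests.models import PreparedRequest, Response


class RetryOn401(AuthBase):
    def __init__(self, username: str, password: str) -> None:
        self.username, self.password = username, password

    def __call__(self, req: PreparedRequest) -> PreparedRequest:
        req.register_hook("response", self.handle_401)
        return req

    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
        if resp.status_code != 401:
            return resp
        # Drain and release the connection so the retry can reuse it.
        resp.content
        resp.raw.release_conn()
        req = HTTPBasicAuth(self.username, self.password)(resp.request)
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)
        return new_resp

# Usage: session = requests.Session(); session.auth = RetryOn401("user", "pw")
```
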
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp: Response, **kwargs: Any) -> None: + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + "401 Error, Credentials not correct for %s", + resp.request.url, + ) + + def save_credentials(self, resp: Response, **kwargs: Any) -> None: + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info("Saving credentials to keyring") + keyring.set_password(*creds) + except Exception: + logger.exception("Failed to save credentials") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/cache.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/cache.py new file mode 100644 index 000000000..9dba7edf9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,69 @@ +"""HTTP cache implementation. +""" + +import os +from contextlib import contextmanager +from typing import Iterator, Optional + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir + + +def is_from_cache(response: Response) -> bool: + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors() -> Iterator[None]: + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except OSError: + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory: str) -> None: + assert directory is not None, "Cache directory must not be None." + super().__init__() + self.directory = directory + + def _get_cache_path(self, name: str) -> str: + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
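
`_get_cache_path()` (continued just below) mirrors cachecontrol's on-disk layout: the key is hashed and the first few hex digits of the digest become nested shard directories, which keeps any single directory from accumulating millions of entries. Roughly, assuming `FileCache.encode` is the sha224-based helper found in the vendored cachecontrol:

```python
import hashlib
import os

# Illustrative only: what the sharded layout looks like for one cache key.
hashed = hashlib.sha224(b"https://pypi.org/simple/requests/").hexdigest()
parts = list(hashed[:5]) + [hashed]  # five one-character shard dirs + filename
print(os.path.join("/cache", *parts))
# /cache/<h0>/<h1>/<h2>/<h3>/<h4>/<full digest>
```
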
+ hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key: str) -> Optional[bytes]: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, "rb") as f: + return f.read() + + def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key: str) -> None: + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/download.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/download.py new file mode 100644 index 000000000..35bc970e2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/download.py @@ -0,0 +1,185 @@ +"""Download files with progress indicators. +""" +import cgi +import logging +import mimetypes +import os +from typing import Iterable, Optional, Tuple + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.cli.progress_bars import get_download_progress_renderer +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.models.index import PyPI +from pip._internal.models.link import Link +from pip._internal.network.cache import is_from_cache +from pip._internal.network.session import PipSession +from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks +from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp: Response) -> Optional[int]: + try: + return int(resp.headers["content-length"]) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp: Response, + link: Link, + progress_bar: str, +) -> Iterable[bytes]: + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = "{} ({})".format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length) + return renderer(chunks) + + +def sanitize_content_filename(filename: str) -> str: + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition: str, default_filename: str) -> str: + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. + """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get("filename") + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. 
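
The `basename()` sanitization that follows matters because the server controls the Content-Disposition header: without it a hostile filename could escape the download directory. A quick illustration with the same stdlib calls (`cgi.parse_header` exists on the Python 3.10 this tree targets, though it is deprecated in later releases):

```python
import cgi
import os

header = 'attachment; filename="../../../etc/passwd"'
_type, params = cgi.parse_header(header)
print(params["filename"])                    # ../../../etc/passwd
print(os.path.basename(params["filename"]))  # passwd  (traversal stripped)
```
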
+ filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp: Response, link: Link) -> str: + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. + """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get("content-disposition") + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext: Optional[str] = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(resp.headers.get("content-type", "")) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session: PipSession, link: Link) -> Response: + target_url = link.url.split("#", 1)[0] + resp = session.get(target_url, headers=HEADERS, stream=True) + raise_for_status(resp) + return resp + + +class Downloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link: Link, location: str) -> Tuple[str, str]: + """Download the file given by link into location.""" + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + return filepath, content_type + + +class BatchDownloader: + def __init__( + self, + session: PipSession, + progress_bar: str, + ) -> None: + self._session = session + self._progress_bar = progress_bar + + def __call__( + self, links: Iterable[Link], location: str + ) -> Iterable[Tuple[Link, Tuple[str, str]]]: + """Download the files given by links into location.""" + for link in links: + try: + resp = _http_get_download(self._session, link) + except NetworkConnectionError as e: + assert e.response is not None + logger.critical( + "HTTP error %s while getting %s", + e.response.status_code, + link, + ) + raise + + filename = _get_http_response_filename(resp, link) + filepath = os.path.join(location, filename) + + chunks = _prepare_download(resp, link, self._progress_bar) + with open(filepath, "wb") as content_file: + for chunk in chunks: + content_file.write(chunk) + content_type = resp.headers.get("Content-Type", "") + yield link, (filepath, content_type) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py new file mode 100644 index 000000000..c9e44d5be --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py @@ -0,0 +1,210 @@ +"""Lazy ZIP over HTTP""" + +__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"] + +from bisect import bisect_left, bisect_right +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, Dict, Iterator, List, Optional, Tuple +from zipfile import BadZipfile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from 
pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
+
+
+class HTTPRangeRequestUnsupported(Exception):
+    pass
+
+
+def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
+    """Return a distribution object from the given wheel URL.
+
+    This uses HTTP range requests to only fetch the portion of the wheel
+    containing metadata, just enough for the object to be constructed.
+    If such requests are not supported, HTTPRangeRequestUnsupported
+    is raised.
+    """
+    with LazyZipOverHTTP(url, session) as zf:
+        # For read-only ZIP files, ZipFile only needs methods read,
+        # seek, seekable and tell, not the whole IO protocol.
+        wheel = MemoryWheel(zf.name, zf)  # type: ignore
+        # After context manager exit, wheel.name
+        # is an invalid file by intention.
+        return get_wheel_distribution(wheel, canonicalize_name(name))
+
+
+class LazyZipOverHTTP:
+    """File-like object mapped to a ZIP file over HTTP.
+
+    This uses HTTP range requests to lazily fetch the file's content,
+    which is supposed to be fed to ZipFile.  If such requests are not
+    supported by the server, raise HTTPRangeRequestUnsupported
+    during initialization.
+    """
+
+    def __init__(
+        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
+    ) -> None:
+        head = session.head(url, headers=HEADERS)
+        raise_for_status(head)
+        assert head.status_code == 200
+        self._session, self._url, self._chunk_size = session, url, chunk_size
+        self._length = int(head.headers["Content-Length"])
+        self._file = NamedTemporaryFile()
+        self.truncate(self._length)
+        self._left: List[int] = []
+        self._right: List[int] = []
+        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
+            raise HTTPRangeRequestUnsupported("range request is not supported")
+        self._check_zip()
+
+    @property
+    def mode(self) -> str:
+        """Opening mode, which is always rb."""
+        return "rb"
+
+    @property
+    def name(self) -> str:
+        """Path to the underlying file."""
+        return self._file.name
+
+    def seekable(self) -> bool:
+        """Return whether random access is supported, which is True."""
+        return True
+
+    def close(self) -> None:
+        """Close the file."""
+        self._file.close()
+
+    @property
+    def closed(self) -> bool:
+        """Whether the file is closed."""
+        return self._file.closed
+
+    def read(self, size: int = -1) -> bytes:
+        """Read up to size bytes from the object and return them.
+
+        As a convenience, if size is unspecified or -1,
+        all bytes until EOF are returned.  Fewer than
+        size bytes may be returned if EOF is reached.
+        """
+        download_size = max(size, self._chunk_size)
+        start, length = self.tell(), self._length
+        stop = length if size < 0 else min(start + download_size, length)
+        start = max(0, stop - download_size)
+        self._download(start, stop - 1)
+        return self._file.read(size)
+
+    def readable(self) -> bool:
+        """Return whether the file is readable, which is True."""
+        return True
+
+    def seek(self, offset: int, whence: int = 0) -> int:
+        """Change stream position and return the new absolute position.
+
+        Seek to offset relative position indicated by whence:
+        * 0: Start of stream (the default).  pos should be >= 0;
+        * 1: Current position - pos may be negative;
+        * 2: End of stream - pos usually negative.
+ """ + return self._file.seek(offset, whence) + + def tell(self) -> int: + """Return the current position.""" + return self._file.tell() + + def truncate(self, size: Optional[int] = None) -> int: + """Resize the stream to the given size in bytes. + + If size is unspecified resize to the current position. + The current stream position isn't changed. + + Return the new file size. + """ + return self._file.truncate(size) + + def writable(self) -> bool: + """Return False.""" + return False + + def __enter__(self) -> "LazyZipOverHTTP": + self._file.__enter__() + return self + + def __exit__(self, *exc: Any) -> Optional[bool]: + return self._file.__exit__(*exc) + + @contextmanager + def _stay(self) -> Iterator[None]: + """Return a context manager keeping the position. + + At the end of the block, seek back to original position. + """ + pos = self.tell() + try: + yield + finally: + self.seek(pos) + + def _check_zip(self) -> None: + """Check and download until the file is a valid ZIP.""" + end = self._length - 1 + for start in reversed(range(0, end, self._chunk_size)): + self._download(start, end) + with self._stay(): + try: + # For read-only ZIP files, ZipFile only needs + # methods read, seek, seekable and tell. + ZipFile(self) # type: ignore + except BadZipfile: + pass + else: + break + + def _stream_response( + self, start: int, end: int, base_headers: Dict[str, str] = HEADERS + ) -> Response: + """Return HTTP response to a range request from start to end.""" + headers = base_headers.copy() + headers["Range"] = f"bytes={start}-{end}" + # TODO: Get range requests to be correctly cached + headers["Cache-Control"] = "no-cache" + return self._session.get(self._url, headers=headers, stream=True) + + def _merge( + self, start: int, end: int, left: int, right: int + ) -> Iterator[Tuple[int, int]]: + """Return an iterator of intervals to be fetched. + + Args: + start (int): Start of needed interval + end (int): End of needed interval + left (int): Index of first overlapping downloaded data + right (int): Index after last overlapping downloaded data + """ + lslice, rslice = self._left[left:right], self._right[left:right] + i = start = min([start] + lslice[:1]) + end = max([end] + rslice[-1:]) + for j, k in zip(lslice, rslice): + if j > i: + yield i, j - 1 + i = k + 1 + if i <= end: + yield i, end + self._left[left:right], self._right[left:right] = [start], [end] + + def _download(self, start: int, end: int) -> None: + """Download bytes from start to end inclusively.""" + with self._stay(): + left = bisect_left(self._right, start) + right = bisect_right(self._left, end) + for start, end in self._merge(start, end, left, right): + response = self._stream_response(start, end) + response.raise_for_status() + self.seek(start) + for chunk in response_chunks(response, self._chunk_size): + self._file.write(chunk) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/session.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/session.py new file mode 100644 index 000000000..cbe743ba6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/session.py @@ -0,0 +1,454 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. 
+""" + +import email.utils +import io +import ipaddress +import json +import logging +import mimetypes +import os +import platform +import shutil +import subprocess +import sys +import urllib.parse +import warnings +from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union + +from pip._vendor import requests, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import PreparedRequest, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3.connectionpool import ConnectionPool +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.metadata import get_default_environment +from pip._internal.models.link import Link +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache + +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import build_url_from_netloc, parse_netloc +from pip._internal.utils.urls import url_to_path + +logger = logging.getLogger(__name__) + +SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS: List[SecureOrigin] = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. +# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + "BUILD_BUILDID", + # Jenkins + "BUILD_ID", + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + "CI", + # Explicit environment variable. + "PIP_IS_CI", +) + + +def looks_like_ci() -> bool: + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent() -> str: + """ + Return a string representing the user agent. 
+ """ + data: Dict[str, Any] = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == "CPython": + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "PyPy": + pypy_version_info = sys.pypy_version_info # type: ignore + if pypy_version_info.releaselevel == "final": + pypy_version_info = pypy_version_info[:3] + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == "Jython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == "IronPython": + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + + linux_distribution = distro.name(), distro.version(), distro.codename() + distro_infos: Dict[str, Any] = dict( + filter( + lambda x: x[1], + zip(["name", "version", "id"], linux_distribution), + ) + ) + libc = dict( + filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + ) + ) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_dist = get_default_environment().get_distribution("setuptools") + if setuptools_dist is not None: + data["setuptools_version"] = str(setuptools_dist.version) + + if shutil.which("rustc") is not None: + # If for any reason `rustc --version` fails, silently ignore it + try: + rustc_output = subprocess.check_output( + ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 + ) + except Exception: + pass + else: + if rustc_output.startswith(b"rustc "): + # The format of `rustc --version` is: + # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'` + # We extract just the middle (1.52.1) part + data["rustc_version"] = rustc_output.split(b" ")[1].decode() + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. 
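
The finished header is the `pip/<version>` product token followed by a compact, key-sorted JSON blob, which PyPI's analytics can parse unambiguously. With hypothetical values, the formatting at the end of this function behaves like:

```python
import json

# Hypothetical subset of the telemetry dict built above.
data = {"installer": {"name": "pip", "version": "22.0"}, "python": "3.10.4"}
ua = "{name}/{version} {json}".format(
    name=data["installer"]["name"],
    version=data["installer"]["version"],
    json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
print(ua)
# pip/22.0 {"installer":{"name":"pip","version":"22.0"},"python":"3.10.4"}
```
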
+ data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: Optional[Union[float, Tuple[float, float]]] = None, + verify: Union[bool, str] = True, + cert: Optional[Union[str, Tuple[str, str]]] = None, + proxies: Optional[Mapping[str, str]] = None, + ) -> Response: + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + # format the exception raised as a io.BytesIO object, + # to return a better error message: + resp.status_code = 404 + resp.reason = type(exc).__name__ + resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8")) + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self) -> None: + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class InsecureCacheControlAdapter(CacheControlAdapter): + def cert_verify( + self, + conn: ConnectionPool, + url: str, + verify: Union[bool, str], + cert: Optional[Union[str, Tuple[str, str]]], + ) -> None: + super().cert_verify(conn=conn, url=url, verify=False, cert=cert) + + +class PipSession(requests.Session): + + timeout: Optional[int] = None + + def __init__( + self, + *args: Any, + retries: int = 0, + cache: Optional[str] = None, + trusted_hosts: Sequence[str] = (), + index_urls: Optional[List[str]] = None, + **kwargs: Any, + ) -> None: + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. + """ + super().__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = [] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. 
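
The retry-and-mount wiring being configured here can be reproduced against the public `requests` and `urllib3` packages (a sketch; `PipSession` additionally swaps in caching and trusted-host adapters, as the code below goes on to show):

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retries = Retry(
    total=3,
    status_forcelist=[500, 503, 520, 527],  # transient server/CDN errors
    backoff_factor=0.25,  # sleep briefly between attempts
)
session.mount("https://", HTTPAdapter(max_retries=retries))
session.mount("http://", HTTPAdapter(max_retries=retries))
```
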
+ backoff_factor=0.25, + ) # type: ignore + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching so we'll use it for all http:// URLs. + # If caching is disabled, we will also use it for + # https:// hosts that we've marked as ignoring + # TLS errors for (trusted-hosts). + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + # We want to _only_ cache responses on securely fetched origins or when + # the host is specified as trusted. We do this because + # we can't validate the response of an insecurely/untrusted fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + self._trusted_host_adapter = InsecureCacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + self._trusted_host_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def update_index_urls(self, new_index_urls: List[str]) -> None: + """ + :param new_index_urls: New index urls to update the authentication + handler with. + """ + self.auth.index_urls = new_index_urls + + def add_trusted_host( + self, host: str, source: Optional[str] = None, suppress_logging: bool = False + ) -> None: + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = f"adding trusted host: {host!r}" + if source is not None: + msg += f" (from {source})" + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount( + build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter + ) + self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) + if not host_port[1]: + self.mount( + build_url_from_netloc(host, scheme="http") + ":", + self._trusted_host_adapter, + ) + # Mount wildcard ports for the same host. + self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) + + def iter_secure_origins(self) -> Iterator[SecureOrigin]: + yield from SECURE_ORIGINS + for host, port in self.pip_trusted_origins: + yield ("*", host, "*" if port is None else port) + + def is_secure_origin(self, location: Link) -> bool: + # Determine if this url used a secure transport mechanism + parsed = urllib.parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, + parsed.hostname, + parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit("+", 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. 
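
The loop that follows relies on the stdlib `ipaddress` module: a host pattern such as "127.0.0.0/8" matches IP-literal origins via network membership, while origins that are not IP literals raise `ValueError` and fall through to plain hostname comparison. For instance:

```python
import ipaddress

print(ipaddress.ip_address("127.0.0.1") in ipaddress.ip_network("127.0.0.0/8"))  # True
print(ipaddress.ip_address("::1") in ipaddress.ip_network("::1/128"))            # True

try:
    ipaddress.ip_address("pypi.org")
except ValueError:
    print("not an IP literal; compared as a hostname instead")
```
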
+ for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address(origin_host) + network = ipaddress.ip_network(secure_host) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host + and origin_host.lower() != secure_host.lower() + and secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. + if ( + origin_port != secure_port + and secure_port != "*" + and secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super().request(method, url, *args, **kwargs) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/utils.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/utils.py new file mode 100644 index 000000000..094cf1b4a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,96 @@ +from typing import Dict, Iterator + +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.exceptions import NetworkConnectionError + +# The following comments and HTTP headers were originally added by +# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03. +# +# We use Accept-Encoding: identity here because requests defaults to +# accepting compressed responses. This breaks in a variety of ways +# depending on how the server is configured. +# - Some servers will notice that the file isn't a compressible file +# and will leave the file alone and with an empty Content-Encoding +# - Some servers will notice that the file is already compressed and +# will leave the file alone, adding a Content-Encoding: gzip header +# - Some servers won't notice anything at all and will take a file +# that's already been compressed and compress it again, and set +# the Content-Encoding: gzip header +# By setting this to request only the identity encoding we're hoping +# to eliminate the third case. Hopefully there does not exist a server +# which when given a file will notice it is already compressed and that +# you're not asking for a compressed file and will then decompress it +# before sending because if that's the case I don't think it'll ever be +# possible to make this work. 
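
In practice that policy is just a request header plus undecoded streaming. With the public `requests` package (the URL here is hypothetical):

```python
import requests

resp = requests.get(
    "https://files.example.org/pkg-1.0.tar.gz",  # hypothetical artifact URL
    headers={"Accept-Encoding": "identity"},
    stream=True,
)
# decode_content=False preserves the exact bytes the server sent, so a
# checksum computed over these chunks matches the published digest.
for chunk in resp.raw.stream(8192, decode_content=False):
    pass
```
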
+HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"} + + +def raise_for_status(resp: Response) -> None: + http_error_msg = "" + if isinstance(resp.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. + try: + reason = resp.reason.decode("utf-8") + except UnicodeDecodeError: + reason = resp.reason.decode("iso-8859-1") + else: + reason = resp.reason + + if 400 <= resp.status_code < 500: + http_error_msg = ( + f"{resp.status_code} Client Error: {reason} for url: {resp.url}" + ) + + elif 500 <= resp.status_code < 600: + http_error_msg = ( + f"{resp.status_code} Server Error: {reason} for url: {resp.url}" + ) + + if http_error_msg: + raise NetworkConnectionError(http_error_msg, response=resp) + + +def response_chunks( + response: Response, chunk_size: int = CONTENT_CHUNK_SIZE +) -> Iterator[bytes]: + """Given a requests Response, provide the data chunks.""" + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py b/myenv/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 000000000..4a7d55d0e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,60 @@ +"""xmlrpclib.Transport implementation +""" + +import logging +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING, Tuple + +from pip._internal.exceptions import NetworkConnectionError +from pip._internal.network.session import PipSession +from pip._internal.network.utils import raise_for_status + +if TYPE_CHECKING: + from xmlrpc.client import _HostType, _Marshallable + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc.client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__( + self, index_url: str, session: PipSession, use_datetime: bool = False + ) -> None: + super().__init__(use_datetime) + index_parts = urllib.parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request( + self, + host: "_HostType", + handler: str, + request_body: bytes, + verbose: bool = False, + ) -> Tuple["_Marshallable", ...]: + assert isinstance(host, str) + parts = (self._scheme, host, handler, None, None, None) + url = urllib.parse.urlunparse(parts) + try: + headers = {"Content-Type": "text/xml"} + response = self._session.post( + url, + data=request_body, + headers=headers, + stream=True, + ) + raise_for_status(response) + self.verbose = verbose + return self.parse_response(response.raw) + except NetworkConnectionError as exc: + assert exc.response + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, + url, + ) + raise diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..afaca4e15 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc new file mode 100644 index 000000000..9cfc3efa2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc new file mode 100644 index 000000000..8f0e98096 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc new file mode 100644 index 000000000..7ec329db3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..e78a56e2b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc new file mode 100644 
index 000000000..c4893d5c2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc new file mode 100644 index 000000000..dfe0297eb Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc new file mode 100644 index 000000000..b0c5f9f41 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc new file mode 100644 index 000000000..dd5e91914 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc new file mode 100644 index 000000000..670932b9a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc new file mode 100644 index 000000000..596be4fd5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 000000000..e2b7b4445 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,39 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_metadata( + build_env: BuildEnvironment, backend: Pep517HookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. 
+ """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py new file mode 100644 index 000000000..4c3f48b6c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py @@ -0,0 +1,41 @@ +"""Metadata generation logic for source distributions. +""" + +import os + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import ( + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + + +def generate_editable_metadata( + build_env: BuildEnvironment, backend: Pep517HookCaller, details: str +) -> str: + """Generate metadata using mechanisms described in PEP 660. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel/editable, so we don't have to + # consider the possibility that this hook doesn't exist. + runner = runner_with_spinner_message( + "Preparing editable metadata (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 000000000..e60988d64 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,74 @@ +"""Metadata generation logic for legacy source distributions. 
+""" + +import logging +import os + +from pip._internal.build_env import BuildEnvironment +from pip._internal.cli.spinners import open_spinner +from pip._internal.exceptions import ( + InstallationError, + InstallationSubprocessError, + MetadataGenerationFailed, +) +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def _find_egg_info(directory: str) -> str: + """Find an .egg-info subdirectory in `directory`.""" + filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")] + + if not filenames: + raise InstallationError(f"No .egg-info directory found in {directory}") + + if len(filenames) > 1: + raise InstallationError( + "More than one .egg-info directory found in {}".format(directory) + ) + + return os.path.join(directory, filenames[0]) + + +def generate_metadata( + build_env: BuildEnvironment, + setup_py_path: str, + source_dir: str, + isolated: bool, + details: str, +) -> str: + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. + """ + logger.debug( + "Running setup.py (path:%s) egg_info for package %s", + setup_py_path, + details, + ) + + egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + with open_spinner("Preparing metadata (setup.py)") as spinner: + try: + call_subprocess( + args, + cwd=source_dir, + command_desc="python setup.py egg_info", + spinner=spinner, + ) + except InstallationSubprocessError as error: + raise MetadataGenerationFailed(package_details=details) from error + + # Return the .egg-info directory. + return _find_egg_info(egg_info_dir) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 000000000..b0d2fc9ea --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,37 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name: str, + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. 
+ """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building wheel for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error("Failed building wheel for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py new file mode 100644 index 000000000..cf7b01aed --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py @@ -0,0 +1,46 @@ +import logging +import os +from typing import Optional + +from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller + +from pip._internal.utils.subprocess import runner_with_spinner_message + +logger = logging.getLogger(__name__) + + +def build_wheel_editable( + name: str, + backend: Pep517HookCaller, + metadata_directory: str, + tempd: str, +) -> Optional[str]: + """Build one InstallRequirement using the PEP 660 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + try: + logger.debug("Destination directory: %s", tempd) + + runner = runner_with_spinner_message( + f"Building editable for {name} (pyproject.toml)" + ) + with backend.subprocess_runner(runner): + try: + wheel_name = backend.build_editable( + tempd, + metadata_directory=metadata_directory, + ) + except HookMissing as e: + logger.error( + "Cannot build editable %s because the build " + "backend does not have the %s hook", + name, + e, + ) + return None + except Exception: + logger.error("Failed building editable for %s", name) + return None + return os.path.join(tempd, wheel_name) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 000000000..c5f0492cc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,102 @@ +import logging +import os.path +from typing import List, Optional + +from pip._internal.cli.spinners import open_spinner +from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args +from pip._internal.utils.subprocess import call_subprocess, format_command_args + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args: List[str], + command_output: str, +) -> str: + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = f"Command arguments: {command_desc}\n" + + if not command_output: + text += "Command output: None" + elif logger.getEffectiveLevel() > logging.DEBUG: + text += "Command output: [use --verbose to show]" + else: + if not command_output.endswith("\n"): + command_output += "\n" + text += f"Command output:\n{command_output}" + + return text + + +def get_legacy_build_wheel_path( + names: List[str], + temp_dir: str, + name: str, + command_args: List[str], + command_output: str, +) -> Optional[str]: + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. 
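
`build_wheel_pep517()` earlier in this diff ultimately just invokes the backend's `build_wheel` hook in a subprocess. Driving the same hook directly with the `pep517` package (which pip vendors) looks roughly like this; the project path and backend name are hypothetical:

```python
from pep517.wrappers import Pep517HookCaller

# A source tree whose pyproject.toml declares setuptools.build_meta.
backend = Pep517HookCaller("/path/to/project", "setuptools.build_meta")
wheel_filename = backend.build_wheel("/tmp/wheels", metadata_directory=None)
print(wheel_filename)  # e.g. project-1.0-py3-none-any.whl
```
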
+ names = sorted(names) + if not names: + msg = ("Legacy build of wheel for {!r} created no files.\n").format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + "Legacy build of wheel for {!r} created more than one file.\n" + "Filenames (choosing first): {}\n" + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name: str, + setup_py_path: str, + source_dir: str, + global_options: List[str], + build_options: List[str], + tempd: str, +) -> Optional[str]: + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = f"Building wheel for {name} (setup.py)" + with open_spinner(spin_message) as spinner: + logger.debug("Destination directory: %s", tempd) + + try: + output = call_subprocess( + wheel_args, + command_desc="python setup.py bdist_wheel", + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error("Failed building wheel for %s", name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/check.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/check.py new file mode 100644 index 000000000..fb3ac8b9c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,149 @@ +"""Validation of dependencies of packages +""" + +import logging +from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple + +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.metadata import get_default_environment +from pip._internal.metadata.base import DistributionVersion +from pip._internal.req.req_install import InstallRequirement + +logger = logging.getLogger(__name__) + + +class PackageDetails(NamedTuple): + version: DistributionVersion + dependencies: List[Requirement] + + +# Shorthands +PackageSet = Dict[NormalizedName, PackageDetails] +Missing = Tuple[NormalizedName, Requirement] +Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement] + +MissingDict = Dict[NormalizedName, List[Missing]] +ConflictingDict = Dict[NormalizedName, List[Conflicting]] +CheckResult = Tuple[MissingDict, ConflictingDict] +ConflictDetails = Tuple[PackageSet, CheckResult] + + +def create_package_set_from_installed() -> Tuple[PackageSet, bool]: + """Converts a list of distributions into a PackageSet.""" + package_set = {} + problems = False + env = get_default_environment() + for dist in env.iter_installed_distributions(local_only=False, skip=()): + name = dist.canonical_name + try: + dependencies = list(dist.iter_dependencies()) + package_set[name] = PackageDetails(dist.version, dependencies) + except (OSError, ValueError) as e: + # Don't crash on unreadable or broken metadata. 
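
`check_package_set()` just below does its real work with the `packaging` library's `Requirement` objects: environment markers decide whether a missing dependency even applies on this platform, and specifier sets decide whether an installed version conflicts. With the public `packaging` distribution (pip vendors the same code):

```python
from packaging.requirements import Requirement
from packaging.version import Version

req = Requirement('colorama>=0.4; sys_platform == "win32"')
print(req.marker.evaluate())  # False off Windows: the dep is not "missing" there

req = Requirement("urllib3>=1.26,<2")
print(req.specifier.contains(Version("2.0.1"), prereleases=True))  # False: conflict
```
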
+ logger.warning("Error parsing requirements for %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set( + package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None +) -> CheckResult: + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. + """ + + missing = {} + conflicting = {} + + for package_name, package_detail in package_set.items(): + # Info about dependencies of package_name + missing_deps: Set[Missing] = set() + conflicting_deps: Set[Conflicting] = set() + + if should_ignore and should_ignore(package_name): + continue + + for req in package_detail.dependencies: + name = canonicalize_name(req.name) + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate() + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails: + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ), + ) + + +def _simulate_installation_of( + to_install: List[InstallRequirement], package_set: PackageSet +) -> Set[NormalizedName]: + """Computes the version of packages after installing to_install.""" + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_metadata_distribution() + name = dist.canonical_name + package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies())) + + installed.add(name) + + return installed + + +def _create_whitelist( + would_be_installed: Set[NormalizedName], package_set: PackageSet +) -> Set[NormalizedName]: + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].dependencies: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/freeze.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 000000000..456554085 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,254 @@ +import collections +import logging +import os +from typing import Container, Dict, Iterable, Iterator, List, NamedTuple, Optional, Set + +from pip._vendor.packaging.utils 
import canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.metadata import BaseDistribution, get_environment +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference + +logger = logging.getLogger(__name__) + + +class _EditableInfo(NamedTuple): + requirement: str + comments: List[str] + + +def freeze( + requirement: Optional[List[str]] = None, + local_only: bool = False, + user_only: bool = False, + paths: Optional[List[str]] = None, + isolated: bool = False, + exclude_editable: bool = False, + skip: Container[str] = (), +) -> Iterator[str]: + installations: Dict[str, FrozenRequirement] = {} + + dists = get_environment(paths).iter_installed_distributions( + local_only=local_only, + skip=(), + user_only=user_only, + ) + for dist in dists: + req = FrozenRequirement.from_dist(dist) + if exclude_editable and req.editable: + continue + installations[req.canonical_name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options: Set[str] = set() + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. + req_files: Dict[str, List[str]] = collections.defaultdict(list) + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if ( + not line.strip() + or line.strip().startswith("#") + or line.startswith( + ( + "-r", + "--requirement", + "-f", + "--find-links", + "-i", + "--index-url", + "--pre", + "--trusted-host", + "--process-dependency-links", + "--extra-index-url", + "--use-feature", + ) + ) + ): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith("-e") or line.startswith("--editable"): + if line.startswith("-e"): + line = line[2:].strip() + else: + line = line[len("--editable") :].strip().lstrip("=") + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub("", line).strip(), + isolated=isolated, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name(line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub("", line).strip(), + line_req.name, + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different 
requirements files). + for name, files in req_files.items(): + if len(files) > 1: + logger.warning( + "Requirement %s included multiple times [%s]", + name, + ", ".join(sorted(set(files))), + ) + + yield ("## The following requirements were added by pip freeze:") + for installation in sorted(installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def _format_as_name_version(dist: BaseDistribution) -> str: + if isinstance(dist.version, Version): + return f"{dist.raw_name}=={dist.version}" + return f"{dist.raw_name}==={dist.version}" + + +def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: + """ + Compute and return values (req, comments) for use in + FrozenRequirement.from_dist(). + """ + editable_project_location = dist.editable_project_location + assert editable_project_location + location = os.path.normcase(os.path.abspath(editable_project_location)) + + from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs + + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + display = _format_as_name_version(dist) + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', + display, + location, + ) + return _EditableInfo( + requirement=location, + comments=[f"# Editable install with no version control ({display})"], + ) + + vcs_name = type(vcs_backend).__name__ + + try: + req = vcs_backend.get_src_requirement(location, dist.raw_name) + except RemoteNotFoundError: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[f"# Editable {vcs_name} install with no remote ({display})"], + ) + except RemoteNotValidError as ex: + display = _format_as_name_version(dist) + return _EditableInfo( + requirement=location, + comments=[ + f"# Editable {vcs_name} install ({display}) with either a deleted " + f"local remote or invalid URI:", + f"# '{ex.url}'", + ], + ) + except BadCommand: + logger.warning( + "cannot determine version of editable source in %s " + "(%s command not found in path)", + location, + vcs_backend.name, + ) + return _EditableInfo(requirement=location, comments=[]) + except InstallationError as exc: + logger.warning("Error when trying to get requirement for VCS system %s", exc) + else: + return _EditableInfo(requirement=req, comments=[]) + + logger.warning("Could not determine repository location of %s", location) + + return _EditableInfo( + requirement=location, + comments=["## !! 
Could not determine repository location"], + ) + + +class FrozenRequirement: + def __init__( + self, + name: str, + req: str, + editable: bool, + comments: Iterable[str] = (), + ) -> None: + self.name = name + self.canonical_name = canonicalize_name(name) + self.req = req + self.editable = editable + self.comments = comments + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement": + editable = dist.editable + if editable: + req, comments = _get_editable_info(dist) + else: + comments = [] + direct_url = dist.direct_url + if direct_url: + # if PEP 610 metadata is present, use it + req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name) + else: + # name==version requirement + req = _format_as_name_version(dist) + + return cls(dist.raw_name, req, editable, comments=comments) + + def __str__(self) -> str: + req = self.req + if self.editable: + req = f"-e {req}" + return "\n".join(list(self.comments) + [str(req)]) + "\n" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 000000000..24d6a5dd3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. +""" diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..f33a64d20 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc new file mode 100644 index 000000000..9f5b5a1d3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc new file mode 100644 index 000000000..bf06d4c97 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc new file mode 100644 index 000000000..67aa72e48 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 000000000..bb548cdca --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,47 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. 
+""" +import logging +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options: List[str], + global_options: Sequence[str], + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, + name: str, + setup_py_path: str, + isolated: bool, + build_env: BuildEnvironment, + unpacked_source_directory: str, +) -> None: + """Install a package in editable mode. Most arguments are pass-through + to setuptools. + """ + logger.info("Running setup.py develop for %s", name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + command_desc="python setup.py develop", + cwd=unpacked_source_directory, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/legacy.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 000000000..5b7ef9017 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,120 @@ +"""Legacy installation process, i.e. `setup.py install`. +""" + +import logging +import os +from distutils.util import change_root +from typing import List, Optional, Sequence + +from pip._internal.build_env import BuildEnvironment +from pip._internal.exceptions import InstallationError, LegacyInstallFailure +from pip._internal.models.scheme import Scheme +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +def write_installed_files_from_setuptools_record( + record_lines: List[str], + root: Optional[str], + req_description: str, +) -> None: + def prepend_root(path: str) -> str: + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith(".egg-info"): + egg_info_dir = prepend_root(directory) + break + else: + message = ( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." 
+ ).format(req_description) + raise InstallationError(message) + + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir)) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, "installed-files.txt") + with open(inst_files_path, "w") as f: + f.write("\n".join(new_lines) + "\n") + + +def install( + install_options: List[str], + global_options: Sequence[str], + root: Optional[str], + home: Optional[str], + prefix: Optional[str], + use_user_site: bool, + pycompile: bool, + scheme: Scheme, + setup_py_path: str, + isolated: bool, + req_name: str, + build_env: BuildEnvironment, + unpacked_source_directory: str, + req_description: str, +) -> bool: + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + try: + record_filename = os.path.join(temp_dir.path, "install-record.txt") + install_args = make_setuptools_install_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + f"Running setup.py install for {req_name}" + ) + with build_env: + runner( + cmd=install_args, + cwd=unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug("Record file %s not found", record_filename) + # Signal to the caller that we didn't install the new package + return False + + except Exception as e: + # Signal to the caller that we didn't install the new package + raise LegacyInstallFailure(package_details=req_name) from e + + # At this point, we have successfully installed the requirement. + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. + with open(record_filename) as f: + record_lines = f.read().splitlines() + + write_installed_files_from_setuptools_record(record_lines, root, req_description) + return True diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 000000000..e191b1343 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,738 @@ +"""Support for installing and building the "wheel" binary package format. 
+""" + +import collections +import compileall +import contextlib +import csv +import importlib +import logging +import os.path +import re +import shutil +import sys +import warnings +from base64 import urlsafe_b64encode +from email.message import Message +from itertools import chain, filterfalse, starmap +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Iterable, + Iterator, + List, + NewType, + Optional, + Sequence, + Set, + Tuple, + Union, + cast, +) +from zipfile import ZipFile, ZipInfo + +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.metadata import ( + BaseDistribution, + FilesystemWheel, + get_wheel_distribution, +) +from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl +from pip._internal.models.scheme import SCHEME_KEYS, Scheme +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition +from pip._internal.utils.unpacking import ( + current_umask, + is_within_directory, + set_extracted_file_to_default_mode_plus_executable, + zip_item_is_executable, +) +from pip._internal.utils.wheel import parse_wheel + +if TYPE_CHECKING: + from typing import Protocol + + class File(Protocol): + src_record_path: "RecordPath" + dest_path: str + changed: bool + + def save(self) -> None: + pass + + +logger = logging.getLogger(__name__) + +RecordPath = NewType("RecordPath", str) +InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]] + + +def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]: + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") + return (digest, str(length)) + + +def csv_io_kwargs(mode: str) -> Dict[str, Any]: + """Return keyword arguments to properly open a CSV file + in the given mode. + """ + return {"mode": mode, "newline": "", "encoding": "utf-8"} + + +def fix_script(path: str) -> bool: + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + assert os.path.isfile(path) + + with open(path, "rb") as script: + firstline = script.readline() + if not firstline.startswith(b"#!python"): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b"#!" + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, "wb") as script: + script.write(firstline) + script.write(rest) + return True + + +def wheel_root_is_purelib(metadata: Message) -> bool: + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]: + console_scripts = {} + gui_scripts = {} + for entry_point in dist.iter_entry_points(): + if entry_point.group == "console_scripts": + console_scripts[entry_point.name] = entry_point.value + elif entry_point.group == "gui_scripts": + gui_scripts[entry_point.name] = entry_point.value + return console_scripts, gui_scripts + + +def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: + """Determine if any scripts are not on PATH and format a warning. 
+ Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set) + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) + for i in os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. + # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for: Dict[str, Set[str]] = { + parent_dir: scripts + for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts: List[str] = sorted(dir_scripts) + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH.".format( + start_text, parent_dir + ) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def _normalized_outrows( + outrows: Iterable[InstalledCSVRow], +) -> List[Tuple[str, str, str]]: + """Normalize the given rows of a RECORD file. + + Items in each row are converted into str. Rows are then sorted to make + the value more predictable for tests. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. 
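+    # For example (hypothetical rows), ("a.py", "sha256=x", 5) and
+    # ("a.py", "sha256=x", "5") both normalize to ("a.py", "sha256=x", "5"),
+    # so sorted() never has to compare an int with a str.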
+ # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted( + (record_path, hash_, str(size)) for record_path, hash_, size in outrows + ) + + +def _record_to_fs_path(record_path: RecordPath) -> str: + return record_path + + +def _fs_to_record_path(path: str, relative_to: Optional[str] = None) -> RecordPath: + if relative_to is not None: + # On Windows, do not handle relative paths if they belong to different + # logical disks + if ( + os.path.splitdrive(path)[0].lower() + == os.path.splitdrive(relative_to)[0].lower() + ): + path = os.path.relpath(path, relative_to) + path = path.replace(os.path.sep, "/") + return cast("RecordPath", path) + + +def get_csv_rows_for_installed( + old_csv_rows: List[List[str]], + installed: Dict[RecordPath, RecordPath], + changed: Set[RecordPath], + generated: List[str], + lib_dir: str, +) -> List[InstalledCSVRow]: + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows: List[InstalledCSVRow] = [] + for row in old_csv_rows: + if len(row) > 3: + logger.warning("RECORD line has more than three elements: %s", row) + old_record_path = cast("RecordPath", row[0]) + new_record_path = installed.pop(old_record_path, old_record_path) + if new_record_path in changed: + digest, length = rehash(_record_to_fs_path(new_record_path)) + else: + digest = row[1] if len(row) > 1 else "" + length = row[2] if len(row) > 2 else "" + installed_rows.append((new_record_path, digest, length)) + for f in generated: + path = _fs_to_record_path(f, lib_dir) + digest, length = rehash(f) + installed_rows.append((path, digest, length)) + for installed_record_path in installed.values(): + installed_rows.append((installed_record_path, "", "")) + return installed_rows + + +def get_console_script_specs(console: Dict[str, str]) -> List[str]: + """ + Given the mapping from entrypoint name to callable, return the relevant + console script specs. + """ + # Don't mutate caller's version + console = console.copy() + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. 
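+    # Concretely: a universal wheel built under Python 3.8 bakes in a
+    # "pip3.8" entry point; installed under Python 3.10 that versioned name
+    # would be stale, so the correct ones are regenerated below from the
+    # running interpreter.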
+ # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. + pip_script = console.pop("pip", None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("pip = " + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + "pip{} = {}".format(sys.version_info[0], pip_script) + ) + + scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop("easy_install", None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append("easy_install = " + easy_install_script) + + scripts_to_generate.append( + "easy_install-{} = {}".format( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console entry points specified in the wheel + scripts_to_generate.extend(starmap("{} = {}".format, console.items())) + + return scripts_to_generate + + +class ZipBackedFile: + def __init__( + self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile + ) -> None: + self.src_record_path = src_record_path + self.dest_path = dest_path + self._zip_file = zip_file + self.changed = False + + def _getinfo(self) -> ZipInfo: + return self._zip_file.getinfo(self.src_record_path) + + def save(self) -> None: + # directory creation is lazy and after file filtering + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + parent_dir = os.path.dirname(self.dest_path) + ensure_dir(parent_dir) + + # When we open the output file below, any existing file is truncated + # before we start writing the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(self.dest_path): + os.unlink(self.dest_path) + + zipinfo = self._getinfo() + + with self._zip_file.open(zipinfo) as f: + with open(self.dest_path, "wb") as dest: + shutil.copyfileobj(f, dest) + + if zip_item_is_executable(zipinfo): + set_extracted_file_to_default_mode_plus_executable(self.dest_path) + + +class ScriptFile: + def __init__(self, file: "File") -> None: + self._file = file + self.src_record_path = self._file.src_record_path + self.dest_path = self._file.dest_path + self.changed = False + + def save(self) -> None: + self._file.save() + self.changed = fix_script(self.dest_path) + + +class MissingCallableSuffix(InstallationError): + def __init__(self, entry_point: str) -> None: + super().__init__( + "Invalid script entry point: {} - A callable " + "suffix is required. 
Cf https://packaging.python.org/"
+            "specifications/entry-points/#use-for-scripts for more "
+            "information.".format(entry_point)
+        )
+
+
+def _raise_for_invalid_entrypoint(specification: str) -> None:
+    entry = get_export_entry(specification)
+    if entry is not None and entry.suffix is None:
+        raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+    def make(self, specification: str, options: Dict[str, Any] = None) -> List[str]:
+        _raise_for_invalid_entrypoint(specification)
+        return super().make(specification, options)
+
+
+def _install_wheel(
+    name: str,
+    wheel_zip: ZipFile,
+    wheel_path: str,
+    scheme: Scheme,
+    pycompile: bool = True,
+    warn_script_location: bool = True,
+    direct_url: Optional[DirectUrl] = None,
+    requested: bool = False,
+) -> None:
+    """Install a wheel.
+
+    :param name: Name of the project to install
+    :param wheel_zip: open ZipFile for wheel being installed
+    :param scheme: Distutils scheme dictating the install directories
+    :param req_description: String used in place of the requirement, for
+        logging
+    :param pycompile: Whether to byte-compile installed Python files
+    :param warn_script_location: Whether to check that scripts are installed
+        into a directory on PATH
+    :raises UnsupportedWheel:
+        * when the directory holds an unpacked wheel with incompatible
+          Wheel-Version
+        * when the .dist-info dir does not match the wheel
+    """
+    info_dir, metadata = parse_wheel(wheel_zip, name)
+
+    if wheel_root_is_purelib(metadata):
+        lib_dir = scheme.purelib
+    else:
+        lib_dir = scheme.platlib
+
+    # Record details of the files moved
+    #   installed = files copied from the wheel to the destination
+    #   changed = files changed while installing (scripts #! line typically)
+    #   generated = files newly generated during the install (script wrappers)
+    installed: Dict[RecordPath, RecordPath] = {}
+    changed: Set[RecordPath] = set()
+    generated: List[str] = []
+
+    def record_installed(
+        srcfile: RecordPath, destfile: str, modified: bool = False
+    ) -> None:
+        """Map archive RECORD paths to installation RECORD paths."""
+        newpath = _fs_to_record_path(destfile, lib_dir)
+        installed[srcfile] = newpath
+        if modified:
+            changed.add(_fs_to_record_path(destfile))
+
+    def is_dir_path(path: RecordPath) -> bool:
+        return path.endswith("/")
+
+    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+        if not is_within_directory(dest_dir_path, target_path):
+            message = (
+                "The wheel {!r} has a file {!r} trying to install"
+                " outside the target directory {!r}"
+            )
+            raise InstallationError(
+                message.format(wheel_path, target_path, dest_dir_path)
+            )
+
+    def root_scheme_file_maker(
+        zip_file: ZipFile, dest: str
+    ) -> Callable[[RecordPath], "File"]:
+        def make_root_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            dest_path = os.path.join(dest, normed_path)
+            assert_no_path_traversal(dest, dest_path)
+            return ZipBackedFile(record_path, dest_path, zip_file)
+
+        return make_root_scheme_file
+
+    def data_scheme_file_maker(
+        zip_file: ZipFile, scheme: Scheme
+    ) -> Callable[[RecordPath], "File"]:
+        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+        def make_data_scheme_file(record_path: RecordPath) -> "File":
+            normed_path = os.path.normpath(record_path)
+            try:
+                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+            except ValueError:
+                message = (
+                    "Unexpected file in {}: {!r}. .data directory contents"
+                    " should be named like: '<scheme key>/<path>'."
+ ).format(wheel_path, record_path) + raise InstallationError(message) + + try: + scheme_path = scheme_paths[scheme_key] + except KeyError: + valid_scheme_keys = ", ".join(sorted(scheme_paths)) + message = ( + "Unknown scheme key used in {}: {} (for file {!r}). .data" + " directory contents should be in subdirectories named" + " with a valid scheme key ({})" + ).format(wheel_path, scheme_key, record_path, valid_scheme_keys) + raise InstallationError(message) + + dest_path = os.path.join(scheme_path, dest_subpath) + assert_no_path_traversal(scheme_path, dest_path) + return ZipBackedFile(record_path, dest_path, zip_file) + + return make_data_scheme_file + + def is_data_scheme_path(path: RecordPath) -> bool: + return path.split("/", 1)[0].endswith(".data") + + paths = cast(List[RecordPath], wheel_zip.namelist()) + file_paths = filterfalse(is_dir_path, paths) + root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths) + + make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir) + files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths) + + def is_script_scheme_path(path: RecordPath) -> bool: + parts = path.split("/", 2) + return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" + + other_scheme_paths, script_scheme_paths = partition( + is_script_scheme_path, data_scheme_paths + ) + + make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) + other_scheme_files = map(make_data_scheme_file, other_scheme_paths) + files = chain(files, other_scheme_files) + + # Get the defined entry points + distribution = get_wheel_distribution( + FilesystemWheel(wheel_path), + canonicalize_name(name), + ) + console, gui = get_entrypoints(distribution) + + def is_entrypoint_wrapper(file: "File") -> bool: + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + path = file.dest_path + name = os.path.basename(path) + if name.lower().endswith(".exe"): + matchname = name[:-4] + elif name.lower().endswith("-script.py"): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return matchname in console or matchname in gui + + script_scheme_files: Iterator[File] = map( + make_data_scheme_file, script_scheme_paths + ) + script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files) + script_scheme_files = map(ScriptFile, script_scheme_files) + files = chain(files, script_scheme_files) + + for file in files: + file.save() + record_installed(file.src_record_path, file.dest_path, file.changed) + + def pyc_source_file_paths() -> Iterator[str]: + # We de-duplicate installation paths, since there can be overlap (e.g. + # file in .data maps to same location as file in wheel root). + # Sorting installation paths makes it easier to reproduce and debug + # issues related to permissions on existing files. 
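+        # For example, "pkg/util.py" can appear both at the wheel root and
+        # under "pkg-1.0.data/purelib/" (hypothetical names) yet map to one
+        # installed file; set() plus sorted() visits it exactly once, in a
+        # stable order.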
+ for installed_path in sorted(set(installed.values())): + full_installed_path = os.path.join(lib_dir, installed_path) + if not os.path.isfile(full_installed_path): + continue + if not full_installed_path.endswith(".py"): + continue + yield full_installed_path + + def pyc_output_path(path: str) -> str: + """Return the path the pyc file would have been written to.""" + return importlib.util.cache_from_source(path) + + # Compile all of the pyc files for the installed files + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + for path in pyc_source_file_paths(): + success = compileall.compile_file(path, force=True, quiet=True) + if success: + pyc_path = pyc_output_path(path) + assert os.path.exists(pyc_path) + pyc_record_path = cast( + "RecordPath", pyc_path.replace(os.path.sep, "/") + ) + record_installed(pyc_record_path, pyc_path) + logger.debug(stdout.getvalue()) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {""} + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate = get_console_script_specs(console) + + gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items())) + + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True})) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + generated_file_mode = 0o666 & ~current_umask() + + @contextlib.contextmanager + def _generate_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: + with adjacent_tmp_file(path, **kwargs) as f: + yield f + os.chmod(f.name, generated_file_mode) + replace(f.name, path) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Record pip as the installer + installer_path = os.path.join(dest_info_dir, "INSTALLER") + with _generate_file(installer_path) as installer_file: + installer_file.write(b"pip\n") + generated.append(installer_path) + + # Record the PEP 610 direct URL reference + if direct_url is not None: + direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) + with _generate_file(direct_url_path) as direct_url_file: + direct_url_file.write(direct_url.to_json().encode("utf-8")) + generated.append(direct_url_path) + + # Record the REQUESTED file + if requested: + requested_path = os.path.join(dest_info_dir, "REQUESTED") + with open(requested_path, "wb"): + pass + generated.append(requested_path) + + record_text = distribution.read_text("RECORD") + record_rows = list(csv.reader(record_text.splitlines())) + + rows = get_csv_rows_for_installed( + record_rows, + installed=installed, + changed=changed, + generated=generated, + lib_dir=lib_dir, + ) + + # Record details of all files installed + record_path = os.path.join(dest_info_dir, "RECORD") + + with _generate_file(record_path, **csv_io_kwargs("w")) as record_file: + # Explicitly cast to typing.IO[str] as a workaround for the mypy error: + # 
"writer" has incompatible type "BinaryIO"; expected "_Writer" + writer = csv.writer(cast("IO[str]", record_file)) + writer.writerows(_normalized_outrows(rows)) + + +@contextlib.contextmanager +def req_error_context(req_description: str) -> Iterator[None]: + try: + yield + except InstallationError as e: + message = "For req: {}. {}".format(req_description, e.args[0]) + raise InstallationError(message) from e + + +def install_wheel( + name: str, + wheel_path: str, + scheme: Scheme, + req_description: str, + pycompile: bool = True, + warn_script_location: bool = True, + direct_url: Optional[DirectUrl] = None, + requested: bool = False, +) -> None: + with ZipFile(wheel_path, allowZip64=True) as z: + with req_error_context(req_description): + _install_wheel( + name=name, + wheel_zip=z, + wheel_path=wheel_path, + scheme=scheme, + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=requested, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/operations/prepare.py b/myenv/lib/python3.10/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 000000000..a726f031a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,642 @@ +"""Prepares a distribution for installation +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import mimetypes +import os +import shutil +from typing import Dict, Iterable, List, Optional + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.distributions import make_distribution_for_install_requirement +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + NetworkConnectionError, + PreviousBuildDirError, + VcsHashUnsupported, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.network.download import BatchDownloader, Downloader +from pip._internal.network.lazy_wheel import ( + HTTPRangeRequestUnsupported, + dist_from_wheel_url, +) +from pip._internal.network.session import PipSession +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_tracker import RequirementTracker +from pip._internal.utils.filesystem import copy2_fixed +from pip._internal.utils.hashes import Hashes, MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +def _get_prepared_distribution( + req: InstallRequirement, + req_tracker: RequirementTracker, + finder: PackageFinder, + build_isolation: bool, +) -> BaseDistribution: + """Prepare a distribution for installation.""" + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist.get_metadata_distribution() + + +def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend 
is not None + vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity) + + +class File: + def __init__(self, path: str, content_type: Optional[str]) -> None: + self.path = path + if content_type is None: + self.content_type = mimetypes.guess_type(path)[0] + else: + self.content_type = content_type + + +def get_http_url( + link: Link, + download: Downloader, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> File: + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = None + else: + # let's download to a tmp dir + from_path, content_type = download(link, temp_dir.path) + if hashes: + hashes.check_against_path(from_path) + + return File(from_path, content_type) + + +def _copy2_ignoring_special_files(src: str, dest: str) -> None: + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. + logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + src, + dest, + ) + + +def _copy_source_tree(source: str, target: str) -> None: + target_abspath = os.path.abspath(target) + target_basename = os.path.basename(target_abspath) + target_dirname = os.path.dirname(target_abspath) + + def ignore(d: str, names: List[str]) -> List[str]: + skipped: List[str] = [] + if d == source: + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). + # See discussion at https://github.com/pypa/pip/pull/6770 + skipped += [".tox", ".nox"] + if os.path.abspath(d) == target_dirname: + # Prevent an infinite recursion if the target is in source. + # This can happen when TMPDIR is set to ${PWD}/... + # and we copy PWD to TMPDIR. + skipped += [target_basename] + return skipped + + shutil.copytree( + source, + target, + ignore=ignore, + symlinks=True, + copy_function=_copy2_ignoring_special_files, + ) + + +def get_file_url( + link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None +) -> File: + """Get file and optionally check its hash.""" + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, download_dir, hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link.file_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. 
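+    # For example, a requirement declared as "pkg==1.0 --hash=sha256:..."
+    # yields a non-empty Hashes here, and a digest mismatch on the local
+    # file raises HashMismatch.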
+ if hashes: + hashes.check_against_path(from_path) + return File(from_path, None) + + +def unpack_url( + link: Link, + location: str, + download: Downloader, + verbosity: int, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, +) -> Optional[File]: + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location, verbosity=verbosity) + return None + + # Once out-of-tree-builds are no longer supported, could potentially + # replace the below condition with `assert not link.is_existing_dir` + # - unpack_url does not need to be called for in-tree-builds. + # + # As further cleanup, _copy_source_tree and accompanying tests can + # be removed. + # + # TODO when use-deprecated=out-of-tree-build is removed + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link.file_path, location) + return None + + # file urls + if link.is_file: + file = get_file_url(link, download_dir, hashes=hashes) + + # http urls + else: + file = get_http_url( + link, + download, + download_dir, + hashes=hashes, + ) + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies, except wheels + if not link.is_wheel: + unpack_file(file.path, location, file.content_type) + + return file + + +def _check_download_dir( + link: Link, download_dir: str, hashes: Optional[Hashes] +) -> Optional[str]: + """Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info("File was already downloaded %s", download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + "Previously-downloaded file %s has bad hash. Re-downloading.", + download_path, + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer: + """Prepares a Requirement""" + + def __init__( + self, + build_dir: str, + download_dir: Optional[str], + src_dir: str, + build_isolation: bool, + req_tracker: RequirementTracker, + session: PipSession, + progress_bar: str, + finder: PackageFinder, + require_hashes: bool, + use_user_site: bool, + lazy_wheel: bool, + verbosity: int, + in_tree_build: bool, + ) -> None: + super().__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.req_tracker = req_tracker + self._session = session + self._download = Downloader(session, progress_bar) + self._batch_download = BatchDownloader(session, progress_bar) + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Is build isolation allowed? + self.build_isolation = build_isolation + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + # Should wheels be downloaded lazily? 
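+        # (i.e. fetch only the metadata of a remote wheel, via HTTP range
+        # requests, before committing to a full download)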
+        self.use_lazy_wheel = lazy_wheel
+
+        # How verbose should underlying tooling be?
+        self.verbosity = verbosity
+
+        # Should in-tree builds be used for local paths?
+        self.in_tree_build = in_tree_build
+
+        # Memoized downloaded files, as mapping of url: path.
+        self._downloaded: Dict[str, str] = {}
+
+        # Previous "header" printed for a link-based InstallRequirement
+        self._previous_requirement_header = ("", "")
+
+    def _log_preparing_link(self, req: InstallRequirement) -> None:
+        """Provide context for the requirement being prepared."""
+        if req.link.is_file and not req.original_link_is_in_wheel_cache:
+            message = "Processing %s"
+            information = str(display_path(req.link.file_path))
+        else:
+            message = "Collecting %s"
+            information = str(req.req or req)
+
+        if (message, information) != self._previous_requirement_header:
+            self._previous_requirement_header = (message, information)
+            logger.info(message, information)
+
+        if req.original_link_is_in_wheel_cache:
+            with indent_log():
+                logger.info("Using cached %s", req.link.filename)
+
+    def _ensure_link_req_src_dir(
+        self, req: InstallRequirement, parallel_builds: bool
+    ) -> None:
+        """Ensure source_dir of a linked InstallRequirement."""
+        # Since source_dir is only set for editable requirements.
+        if req.link.is_wheel:
+            # We don't need to unpack wheels, so no need for a source
+            # directory.
+            return
+        assert req.source_dir is None
+        if req.link.is_existing_dir() and self.in_tree_build:
+            # build local directories in-tree
+            req.source_dir = req.link.file_path
+            return
+
+        # We always delete unpacked sdists after pip runs.
+        req.ensure_has_source_dir(
+            self.build_dir,
+            autodelete=True,
+            parallel_builds=parallel_builds,
+        )
+
+        # If a checkout exists, it's unwise to keep going. Version
+        # inconsistencies are logged later, but do not fail the
+        # installation.
+        # FIXME: this won't upgrade when there's an existing
+        # package unpacked in `req.source_dir`
+        # TODO: this check is now probably dead code
+        if is_installable_dir(req.source_dir):
+            raise PreviousBuildDirError(
+                "pip can't proceed with requirements '{}' due to a "
+                "pre-existing build directory ({}). This is likely "
+                "due to a previous installation that failed. pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again.".format(req, req.source_dir)
+            )
+
+    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
+        # By the time this is called, the requirement's link should have
+        # been checked so we can tell what kind of requirement req is
+        # and raise some more informative errors than otherwise.
+        # (For example, we can raise VcsHashUnsupported for a VCS URL
+        # rather than HashMissing.)
+        if not self.require_hashes:
+            return req.hashes(trust_internet=True)
+
+        # We could check these first 2 conditions inside unpack_url
+        # and save repetition of conditions, but then we would
+        # report less-useful error messages for unhashable
+        # requirements, complaining that there's no hash provided.
+        if req.link.is_vcs:
+            raise VcsHashUnsupported()
+        if req.link.is_existing_dir():
+            raise DirectoryUrlHashUnsupported()
+
+        # Unpinned packages are asking for trouble when a new version
+        # is uploaded. This isn't a security check, but it saves users
+        # a surprising hash mismatch in the future.
+        # file:/// URLs aren't pinnable, so don't complain about them
+        # not being pinned.
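+        # For example, "pkg==1.2.3" counts as pinned while "pkg>=1.2" does
+        # not; an unpinned, non-URL requirement fails here when hashes are
+        # required.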
+        if req.original_link is None and not req.is_pinned:
+            raise HashUnpinned()
+
+        # If known-good hashes are missing for this requirement,
+        # shim it with a facade object that will provoke hash
+        # computation and then raise a HashMissing exception
+        # showing the user what the hash should be.
+        return req.hashes(trust_internet=False) or MissingHashes()
+
+    def _fetch_metadata_using_lazy_wheel(
+        self,
+        link: Link,
+    ) -> Optional[BaseDistribution]:
+        """Fetch metadata using lazy wheel, if possible."""
+        if not self.use_lazy_wheel:
+            return None
+        if self.require_hashes:
+            logger.debug("Lazy wheel is not used as hash checking is required")
+            return None
+        if link.is_file or not link.is_wheel:
+            logger.debug(
+                "Lazy wheel is not used as %r does not point to a remote wheel",
+                link,
+            )
+            return None
+
+        wheel = Wheel(link.filename)
+        name = canonicalize_name(wheel.name)
+        logger.info(
+            "Obtaining dependency information from %s %s",
+            name,
+            wheel.version,
+        )
+        url = link.url.split("#", 1)[0]
+        try:
+            return dist_from_wheel_url(name, url, self._session)
+        except HTTPRangeRequestUnsupported:
+            logger.debug("%s does not support range requests", url)
+            return None
+
+    def _complete_partial_requirements(
+        self,
+        partially_downloaded_reqs: Iterable[InstallRequirement],
+        parallel_builds: bool = False,
+    ) -> None:
+        """Download any requirements which were only fetched by metadata."""
+        # Download to a temporary directory. These will be copied over as
+        # needed for downstream 'download', 'wheel', and 'install' commands.
+        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
+
+        # Map each link to the requirement that owns it. This allows us to set
+        # `req.local_file_path` on the appropriate requirement after passing
+        # all the links at once into BatchDownloader.
+        links_to_fully_download: Dict[Link, InstallRequirement] = {}
+        for req in partially_downloaded_reqs:
+            assert req.link
+            links_to_fully_download[req.link] = req
+
+        batch_download = self._batch_download(
+            links_to_fully_download.keys(),
+            temp_dir,
+        )
+        for link, (filepath, _) in batch_download:
+            logger.debug("Downloading link %s to %s", link, filepath)
+            req = links_to_fully_download[link]
+            req.local_file_path = filepath
+
+        # This step is necessary to ensure all lazy wheels are processed
+        # successfully by the 'download', 'wheel', and 'install' commands.
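+        # Every requirement now has a complete wheel on disk, so finishing
+        # preparation below puts lazily-resolved requirements back on the
+        # same code path as ordinary downloads.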
+ for req in partially_downloaded_reqs: + self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool = False + ) -> BaseDistribution: + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) + with indent_log(): + # Check if the relevant file is already available + # in the download directory + file_path = None + if self.download_dir is not None and link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + + if file_path is not None: + # The file is already available, so mark it as downloaded + self._downloaded[req.link.url] = file_path + else: + # The file is not available, attempt to fetch only metadata + wheel_dist = self._fetch_metadata_using_lazy_wheel(link) + if wheel_dist is not None: + req.needs_more_preparation = True + return wheel_dist + + # None of the optimizations worked, fully prepare the requirement + return self._prepare_linked_requirement(req, parallel_builds) + + def prepare_linked_requirements_more( + self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False + ) -> None: + """Prepare linked requirements more, if needed.""" + reqs = [req for req in reqs if req.needs_more_preparation] + for req in reqs: + # Determine if any of these requirements were already downloaded. + if self.download_dir is not None and req.link.is_wheel: + hashes = self._get_linked_req_hashes(req) + file_path = _check_download_dir(req.link, self.download_dir, hashes) + if file_path is not None: + self._downloaded[req.link.url] = file_path + req.needs_more_preparation = False + + # Prepare requirements we found were already downloaded for some + # reason. The other downloads will be completed separately. + partially_downloaded_reqs: List[InstallRequirement] = [] + for req in reqs: + if req.needs_more_preparation: + partially_downloaded_reqs.append(req) + else: + self._prepare_linked_requirement(req, parallel_builds) + + # TODO: separate this part out from RequirementPreparer when the v1 + # resolver can be removed! + self._complete_partial_requirements( + partially_downloaded_reqs, + parallel_builds=parallel_builds, + ) + + def _prepare_linked_requirement( + self, req: InstallRequirement, parallel_builds: bool + ) -> BaseDistribution: + assert req.link + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + hashes = self._get_linked_req_hashes(req) + + if link.is_existing_dir() and self.in_tree_build: + local_file = None + elif link.url not in self._downloaded: + try: + local_file = unpack_url( + link, + req.source_dir, + self._download, + self.verbosity, + self.download_dir, + hashes, + ) + except NetworkConnectionError as exc: + raise InstallationError( + "Could not install requirement {} because of HTTP " + "error {} for URL {}".format(req, exc, link) + ) + else: + file_path = self._downloaded[link.url] + if hashes: + hashes.check_against_path(file_path) + local_file = File(file_path, content_type=None) + + # For use in later processing, + # preserve the file path on the requirement. 
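+ # (e.g. save_linked_requirement() below copies the file from
+ # req.local_file_path into the download directory.)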
+ if local_file: + req.local_file_path = local_file.path + + dist = _get_prepared_distribution( + req, + self.req_tracker, + self.finder, + self.build_isolation, + ) + return dist + + def save_linked_requirement(self, req: InstallRequirement) -> None: + assert self.download_dir is not None + assert req.link is not None + link = req.link + if link.is_vcs or (link.is_existing_dir() and req.editable): + # Make a .zip of the source_dir we already created. + req.archive(self.download_dir) + return + + if link.is_existing_dir(): + logger.debug( + "Not copying link to destination directory " + "since it is a directory: %s", + link, + ) + return + if req.local_file_path is None: + # No distribution was downloaded for this requirement. + return + + download_location = os.path.join(self.download_dir, link.filename) + if not os.path.exists(download_location): + shutil.copy(req.local_file_path, download_location) + download_path = display_path(download_location) + logger.info("Saved %s", download_path) + + def prepare_editable_requirement( + self, + req: InstallRequirement, + ) -> BaseDistribution: + """Prepare an editable requirement.""" + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info("Obtaining %s", req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + "The editable requirement {} cannot be installed when " + "requiring hashes, because there is no single file to " + "hash.".format(req) + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable() + + dist = _get_prepared_distribution( + req, + self.req_tracker, + self.finder, + self.build_isolation, + ) + + req.check_if_exists(self.use_user_site) + + return dist + + def prepare_installed_requirement( + self, + req: InstallRequirement, + skip_reason: str, + ) -> BaseDistribution: + """Prepare an already-installed requirement.""" + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + "is set to {}".format(req.satisfied_by) + ) + logger.info( + "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + "Since it is already installed, we are trusting this " + "package without checking its hash. To ensure a " + "completely repeatable environment, install into an " + "empty virtualenv." 
+ ) + return InstalledDistribution(req).get_metadata_distribution() diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/pyproject.py b/myenv/lib/python3.10/site-packages/pip/_internal/pyproject.py new file mode 100644 index 000000000..e183eaf86 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/pyproject.py @@ -0,0 +1,168 @@ +import os +from collections import namedtuple +from typing import Any, List, Optional + +from pip._vendor import tomli +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement + +from pip._internal.exceptions import ( + InstallationError, + InvalidPyProjectBuildRequires, + MissingPyProjectBuildRequires, +) + + +def _is_list_of_str(obj: Any) -> bool: + return isinstance(obj, list) and all(isinstance(item, str) for item in obj) + + +def make_pyproject_path(unpacked_source_directory: str) -> str: + return os.path.join(unpacked_source_directory, "pyproject.toml") + + +BuildSystemDetails = namedtuple( + "BuildSystemDetails", ["requires", "backend", "check", "backend_path"] +) + + +def load_pyproject_toml( + use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str +) -> Optional[BuildSystemDetails]: + """Load the pyproject.toml file. + + Parameters: + use_pep517 - Has the user requested PEP 517 processing? None + means the user hasn't explicitly specified. + pyproject_toml - Location of the project's pyproject.toml file + setup_py - Location of the project's setup.py file + req_name - The name of the requirement we're processing (for + error reporting) + + Returns: + None if we should use the legacy code path, otherwise a tuple + ( + requirements from pyproject.toml, + name of PEP 517 backend, + requirements we should check are installed after setting + up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. + ) + """ + has_pyproject = os.path.isfile(pyproject_toml) + has_setup = os.path.isfile(setup_py) + + if not has_pyproject and not has_setup: + raise InstallationError( + f"{req_name} does not appear to be a Python project: " + f"neither 'setup.py' nor 'pyproject.toml' found." + ) + + if has_pyproject: + with open(pyproject_toml, encoding="utf-8") as f: + pp_toml = tomli.loads(f.read()) + build_system = pp_toml.get("build-system") + else: + build_system = None + + # The following cases must use PEP 517 + # We check for use_pep517 being non-None and falsey because that means + # the user explicitly requested --no-use-pep517. The value 0 as + # opposed to False can occur when the value is provided via an + # environment variable or config file option (due to the quirk of + # strtobool() returning an integer in pip's configuration code). + if has_pyproject and not has_setup: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project does not have a setup.py" + ) + use_pep517 = True + elif build_system and "build-backend" in build_system: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project specifies a build backend of {} " + "in pyproject.toml".format(build_system["build-backend"]) + ) + use_pep517 = True + + # If we haven't worked out whether to use PEP 517 yet, + # and the user hasn't explicitly stated a preference, + # we do so if the project has a pyproject.toml file. + elif use_pep517 is None: + use_pep517 = has_pyproject + + # At this point, we know whether we're going to use PEP 517. 
+ assert use_pep517 is not None + + # If we're using the legacy code path, there is nothing further + # for us to do here. + if not use_pep517: + return None + + if build_system is None: + # Either the user has a pyproject.toml with no build-system + # section, or the user has no pyproject.toml, but has opted in + # explicitly via --use-pep517. + # In the absence of any explicit backend specification, we + # assume the setuptools backend that most closely emulates the + # traditional direct setup.py execution, and require wheel and + # a version of setuptools that supports that backend. + + build_system = { + "requires": ["setuptools>=40.8.0", "wheel"], + "build-backend": "setuptools.build_meta:__legacy__", + } + + # If we're using PEP 517, we have build system information (either + # from pyproject.toml, or defaulted by the code above). + # Note that at this point, we do not know if the user has actually + # specified a backend, though. + assert build_system is not None + + # Ensure that the build-system section in pyproject.toml conforms + # to PEP 518. + + # Specifying the build-system table but not the requires key is invalid + if "requires" not in build_system: + raise MissingPyProjectBuildRequires(package=req_name) + + # Error out if requires is not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InvalidPyProjectBuildRequires( + package=req_name, + reason="It is not a list of strings.", + ) + + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement as error: + raise InvalidPyProjectBuildRequires( + package=req_name, + reason=f"It contains an invalid requirement: {requirement!r}", + ) from error + + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) + check: List[str] = [] + if backend is None: + # If the user didn't specify a backend, we assume they want to use + # the setuptools backend. But we can't be sure they have included + # a version of setuptools which supplies the backend, or wheel + # (which is needed by the backend) in their requirements. So we + # make a note to check that those requirements are present once + # we have set up the environment. + # This is quite a lot of work to check for a very specific case. But + # the problem is, that case is potentially quite common - projects that + # adopted PEP 518 early for the ability to specify requirements to + # execute setup.py, but never considered needing to mention the build + # tools themselves. The original PEP 518 code had a similar check (but + # implemented in a different way). 
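+ # A hypothetical pyproject.toml that takes this branch:
+ # [build-system]
+ # requires = ["setuptools", "cython"] # note: no build-backend key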
+ backend = "setuptools.build_meta:__legacy__" + check = ["setuptools>=40.8.0", "wheel"] + + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 000000000..70dea27a6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,94 @@ +import collections +import logging +from typing import Iterator, List, Optional, Sequence, Tuple + +from pip._internal.utils.logging import indent_log + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +__all__ = [ + "RequirementSet", + "InstallRequirement", + "parse_requirements", + "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +class InstallationResult: + def __init__(self, name: str) -> None: + self.name = name + + def __repr__(self) -> str: + return f"InstallationResult(name={self.name!r})" + + +def _validate_requirements( + requirements: List[InstallRequirement], +) -> Iterator[Tuple[str, InstallRequirement]]: + for req in requirements: + assert req.name, f"invalid to-be-installed requirement: {req}" + yield req.name, req + + +def install_given_reqs( + requirements: List[InstallRequirement], + install_options: List[str], + global_options: Sequence[str], + root: Optional[str], + home: Optional[str], + prefix: Optional[str], + warn_script_location: bool, + use_user_site: bool, + pycompile: bool, +) -> List[InstallationResult]: + """ + Install everything in the given list. + + (to be called after having downloaded and unpacked the packages) + """ + to_install = collections.OrderedDict(_validate_requirements(requirements)) + + if to_install: + logger.info( + "Installing collected packages: %s", + ", ".join(to_install.keys()), + ) + + installed = [] + + with indent_log(): + for req_name, requirement in to_install.items(): + if requirement.should_reinstall: + logger.info("Attempting uninstall: %s", req_name) + with indent_log(): + uninstalled_pathset = requirement.uninstall(auto_confirm=True) + else: + uninstalled_pathset = None + + try: + requirement.install( + install_options, + global_options, + root=root, + home=home, + prefix=prefix, + warn_script_location=warn_script_location, + use_user_site=use_user_site, + pycompile=pycompile, + ) + except Exception: + # if install did not succeed, rollback previous uninstall + if uninstalled_pathset and not requirement.install_succeeded: + uninstalled_pathset.rollback() + raise + else: + if uninstalled_pathset and requirement.install_succeeded: + uninstalled_pathset.commit() + + installed.append(InstallationResult(req_name)) + + return installed diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..05f5697de Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc new file mode 100644 index 000000000..544094b5a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc new file mode 100644 index 000000000..fe47030d9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc new file mode 100644 index 000000000..b0af9ac02 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc new file mode 100644 index 000000000..b0772b15a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc new file mode 100644 index 000000000..b0b692a4f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc differ new file mode 100644 index 000000000..09b15f528 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/constructors.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/constructors.py new file mode 100644 index 000000000..25bfb391d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/constructors.py @@ -0,0 +1,490 @@ +"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size,
+so moving them and their dedicated support code out of that class makes the
+rest of the code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+""" + +import logging +import os +import re +from typing import Any, Dict, Optional, Set, Tuple, Union + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.specifiers import Specifier + +from pip._internal.exceptions import InstallationError +from pip._internal.models.index import PyPI, TestPyPI +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_file import ParsedRequirement +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.filetypes import is_archive_file +from pip._internal.utils.misc import is_installable_dir +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import is_url, vcs + +__all__ = [ + "install_req_from_editable", + "install_req_from_line", + "parse_editable", +] + +logger = logging.getLogger(__name__) +operators = Specifier._operators.keys() + + +def _strip_extras(path: str) -> Tuple[str, Optional[str]]: + m = re.match(r"^(.+)(\[[^\]]+\])$", path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +def convert_extras(extras: Optional[str]) -> Set[str]: + if not extras: + return set() + return get_requirement("placeholder" + extras.lower()).extras + + +def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: + """Parses an editable requirement into: + - a requirement name + - an URL + - extras + - editable options + Accepted requirements: + svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + .[some_extra] + """ + + url = editable_req + + # If a file path is specified with extras, strip off the extras. + url_no_extras, extras = _strip_extras(url) + + if os.path.isdir(url_no_extras): + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith("file:"): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + get_requirement("placeholder" + extras.lower()).extras, + ) + else: + return package_name, url_no_extras, set() + + for version_control in vcs: + if url.lower().startswith(f"{version_control}:"): + url = f"{version_control}+{url}" + break + + link = Link(url) + + if not link.is_vcs: + backends = ", ".join(vcs.all_schemes) + raise InstallationError( + f"{editable_req} is not a valid editable requirement. " + f"It should either be a path to a local project or a VCS URL " + f"(beginning with {backends})." + ) + + package_name = link.egg_fragment + if not package_name: + raise InstallationError( + "Could not detect requirement name for '{}', please specify one " + "with #egg=your_package_name".format(editable_req) + ) + return package_name, url, set() + + +def check_first_requirement_in_file(filename: str) -> None: + """Check if file is parsable as a requirements file. + + This is heavily based on ``pkg_resources.parse_requirements``, but + simplified to just check the first meaningful line. + + :raises InvalidRequirement: If the first meaningful line cannot be parsed + as an requirement. + """ + with open(filename, encoding="utf-8", errors="ignore") as f: + # Create a steppable iterator, so we can handle \-continuations. 
+ lines = (
+ line
+ for line in (line.strip() for line in f)
+ if line and not line.startswith("#") # Skip blank lines/comments.
+ )
+
+ for line in lines:
+ # Drop comments -- a hash without a space may be in a URL.
+ if " #" in line:
+ line = line[: line.find(" #")]
+ # If there is a line continuation, drop it, and append the next line.
+ if line.endswith("\\"):
+ line = line[:-2].strip() + next(lines, "")
+ Requirement(line)
+ return
+
+
+def deduce_helpful_msg(req: str) -> str:
+ """Returns a helpful msg in case the requirements file does not exist,
+ or cannot be parsed.
+
+ :param req: Requirements file path
+ """
+ if not os.path.exists(req):
+ return f" File '{req}' does not exist."
+ msg = " The path does exist. "
+ # Try to parse and check if it is a requirements file.
+ try:
+ check_first_requirement_in_file(req)
+ except InvalidRequirement:
+ logger.debug("Cannot parse '%s' as requirements file", req)
+ else:
+ msg += (
+ f"The argument you provided "
+ f"({req}) appears to be a"
+ f" requirements file. If that is the"
+ f" case, use the '-r' flag to install"
+ f" the packages specified within it."
+ )
+ return msg
+
+
+class RequirementParts:
+ def __init__(
+ self,
+ requirement: Optional[Requirement],
+ link: Optional[Link],
+ markers: Optional[Marker],
+ extras: Set[str],
+ ):
+ self.requirement = requirement
+ self.link = link
+ self.markers = markers
+ self.extras = extras
+
+
+def parse_req_from_editable(editable_req: str) -> RequirementParts:
+ name, url, extras_override = parse_editable(editable_req)
+
+ if name is not None:
+ try:
+ req: Optional[Requirement] = Requirement(name)
+ except InvalidRequirement:
+ raise InstallationError(f"Invalid requirement: '{name}'")
+ else:
+ req = None
+
+ link = Link(url)
+
+ return RequirementParts(req, link, None, extras_override)
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+ editable_req: str,
+ comes_from: Optional[Union[InstallRequirement, str]] = None,
+ use_pep517: Optional[bool] = None,
+ isolated: bool = False,
+ options: Optional[Dict[str, Any]] = None,
+ constraint: bool = False,
+ user_supplied: bool = False,
+ permit_editable_wheels: bool = False,
+) -> InstallRequirement:
+
+ parts = parse_req_from_editable(editable_req)
+
+ return InstallRequirement(
+ parts.requirement,
+ comes_from=comes_from,
+ user_supplied=user_supplied,
+ editable=True,
+ permit_editable_wheels=permit_editable_wheels,
+ link=parts.link,
+ constraint=constraint,
+ use_pep517=use_pep517,
+ isolated=isolated,
+ install_options=options.get("install_options", []) if options else [],
+ global_options=options.get("global_options", []) if options else [],
+ hash_options=options.get("hashes", {}) if options else {},
+ extras=parts.extras,
+ )
+
+
+def _looks_like_path(name: str) -> bool:
+ """Checks whether the string "looks like" a path on the filesystem.
+
+ This does not check whether the target actually exists; it only judges
+ from the appearance.
+
+ Returns true if any of the following conditions is true:
+ * a path separator is found (either os.path.sep or os.path.altsep);
+ * a dot is found (which represents the current directory).
+ """
+ if os.path.sep in name:
+ return True
+ if os.path.altsep is not None and os.path.altsep in name:
+ return True
+ if name.startswith("."):
+ return True
+ return False
+
+
+def _get_url_from_path(path: str, name: str) -> Optional[str]:
+ """
+ First, it checks whether a provided path is an installable directory. If it
+ is, returns the path as a file: URL.
+ + If false, check if the path is an archive file (such as a .whl). + The function checks if the path is a file. If false, if the path has + an @, it will treat it as a PEP 440 URL requirement and return the path. + """ + if _looks_like_path(name) and os.path.isdir(path): + if is_installable_dir(path): + return path_to_url(path) + # TODO: The is_installable_dir test here might not be necessary + # now that it is done in load_pyproject_toml too. + raise InstallationError( + f"Directory {name!r} is not installable. Neither 'setup.py' " + "nor 'pyproject.toml' found." + ) + if not is_archive_file(path): + return None + if os.path.isfile(path): + return path_to_url(path) + urlreq_parts = name.split("@", 1) + if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]): + # If the path contains '@' and the part before it does not look + # like a path, try to treat it as a PEP 440 URL req instead. + return None + logger.warning( + "Requirement %r looks like a filename, but the file does not exist", + name, + ) + return path_to_url(path) + + +def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts: + if is_url(name): + marker_sep = "; " + else: + marker_sep = ";" + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == "file" and re.search(r"\.\./", link.url): + link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = f"{wheel.name}=={wheel.version}" + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text: str) -> str: + if not line_source: + return text + return f"{text} (from {line_source})" + + def _parse_req_string(req_as_string: str) -> Requirement: + try: + req = get_requirement(req_as_string) + except InvalidRequirement: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif "=" in req_as_string and not any( + op in req_as_string for op in operators + ): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = "" + msg = with_source(f"Invalid requirement: {req_as_string!r}") + if add_msg: + msg += f"\nHint: {add_msg}" + raise InstallationError(msg) + else: + # Deprecate extras after specifiers: "name>=1.0[extras]" + # This currently works by accident because _strip_extras() parses + # any extras in the end of the string and those are saved in + # RequirementParts + for spec in req.specifier: + spec_str = str(spec) + if spec_str.endswith("]"): + msg = f"Extras after version '{spec_str}'." 
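+ # e.g. a (hypothetical) "name>=1.0[extras]" ends up with the
+ # extras inside the final specifier and is rejected here.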
+ raise InstallationError(msg) + return req + + if req_as_string is not None: + req: Optional[Requirement] = _parse_req_string(req_as_string) + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name: str, + comes_from: Optional[Union[str, InstallRequirement]] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + options: Optional[Dict[str, Any]] = None, + constraint: bool = False, + line_source: Optional[str] = None, + user_supplied: bool = False, +) -> InstallRequirement: + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. + """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, + comes_from, + link=parts.link, + markers=parts.markers, + use_pep517=use_pep517, + isolated=isolated, + install_options=options.get("install_options", []) if options else [], + global_options=options.get("global_options", []) if options else [], + hash_options=options.get("hashes", {}) if options else {}, + constraint=constraint, + extras=parts.extras, + user_supplied=user_supplied, + ) + + +def install_req_from_req_string( + req_string: str, + comes_from: Optional[InstallRequirement] = None, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, +) -> InstallRequirement: + try: + req = get_requirement(req_string) + except InvalidRequirement: + raise InstallationError(f"Invalid requirement: '{req_string}'") + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if ( + req.url + and comes_from + and comes_from.link + and comes_from.link.netloc in domains_not_allowed + ): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + "{} depends on {} ".format(comes_from.name, req) + ) + + return InstallRequirement( + req, + comes_from, + isolated=isolated, + use_pep517=use_pep517, + user_supplied=user_supplied, + ) + + +def install_req_from_parsed_requirement( + parsed_req: ParsedRequirement, + isolated: bool = False, + use_pep517: Optional[bool] = None, + user_supplied: bool = False, +) -> InstallRequirement: + if parsed_req.is_editable: + req = install_req_from_editable( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + constraint=parsed_req.constraint, + isolated=isolated, + user_supplied=user_supplied, + ) + + else: + req = install_req_from_line( + parsed_req.requirement, + comes_from=parsed_req.comes_from, + use_pep517=use_pep517, + isolated=isolated, + options=parsed_req.options, + constraint=parsed_req.constraint, + line_source=parsed_req.line_source, + user_supplied=user_supplied, + ) + return req + + +def install_req_from_link_and_ireq( + link: Link, ireq: InstallRequirement +) -> InstallRequirement: + return InstallRequirement( + req=ireq.req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + install_options=ireq.install_options, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/req_file.py 
b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 000000000..03ae50492 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,536 @@ +"""
+Requirements file parsing
+"""
+
+import optparse
+import os
+import re
+import shlex
+import urllib.parse
+from optparse import Values
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+)
+
+from pip._internal.cli import cmdoptions
+from pip._internal.exceptions import InstallationError, RequirementsFileParseError
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.network.session import PipSession
+from pip._internal.network.utils import raise_for_status
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+ # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+ # pip compatibility with older patch versions of Python 3.6
+ from typing import NoReturn
+
+ from pip._internal.index.package_finder import PackageFinder
+
+__all__ = ["parse_requirements"]
+
+ReqFileLines = Iterable[Tuple[int, str]]
+
+LineParser = Callable[[str], Tuple[str, Values]]
+
+SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
+COMMENT_RE = re.compile(r"(^|\s+)#.*$")
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
+
+SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
+ cmdoptions.index_url,
+ cmdoptions.extra_index_url,
+ cmdoptions.no_index,
+ cmdoptions.constraints,
+ cmdoptions.requirements,
+ cmdoptions.editable,
+ cmdoptions.find_links,
+ cmdoptions.no_binary,
+ cmdoptions.only_binary,
+ cmdoptions.prefer_binary,
+ cmdoptions.require_hashes,
+ cmdoptions.pre,
+ cmdoptions.trusted_host,
+ cmdoptions.use_new_feature,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
+ cmdoptions.install_options,
+ cmdoptions.global_options,
+ cmdoptions.hash,
+]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
+class ParsedRequirement:
+ def __init__(
+ self,
+ requirement: str,
+ is_editable: bool,
+ comes_from: str,
+ constraint: bool,
+ options: Optional[Dict[str, Any]] = None,
+ line_source: Optional[str] = None,
+ ) -> None:
+ self.requirement = requirement
+ self.is_editable = is_editable
+ self.comes_from = comes_from
+ self.options = options
+ self.constraint = constraint
+ self.line_source = line_source
+
+
+class ParsedLine:
+ def __init__(
+ self,
+ filename: str,
+ lineno: int,
+ args: str,
+ opts: Values,
+ constraint: bool,
+ ) -> None:
+ self.filename = filename
+ self.lineno = lineno
+ self.opts = opts
+ self.constraint = constraint
+
+ if args:
+ self.is_requirement = True
+ self.is_editable = False
+ self.requirement = args
+ elif opts.editables:
+ self.is_requirement = True
+ self.is_editable = True
+ # We don't support multiple -e on one line
+ self.requirement = opts.editables[0]
+ else:
+ self.is_requirement = False
+
+
+def parse_requirements(
+ filename: str,
+ session: PipSession,
+ finder: Optional["PackageFinder"] = None,
+ options: Optional[optparse.Values] = None,
+ constraint: bool = False,
+) -> 
Iterator[ParsedRequirement]: + """Parse a requirements file and yield ParsedRequirement instances. + + :param filename: Path or url of requirements file. + :param session: PipSession instance. + :param finder: Instance of pip.index.PackageFinder. + :param options: cli options. + :param constraint: If true, parsing a constraint file rather than + requirements file. + """ + line_parser = get_line_parser(finder) + parser = RequirementsFileParser(session, line_parser) + + for parsed_line in parser.parse(filename, constraint): + parsed_req = handle_line( + parsed_line, options=options, finder=finder, session=session + ) + if parsed_req is not None: + yield parsed_req + + +def preprocess(content: str) -> ReqFileLines: + """Split, filter, and join lines, and return a line iterator + + :param content: the content of the requirements file + """ + lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1) + lines_enum = join_lines(lines_enum) + lines_enum = ignore_comments(lines_enum) + lines_enum = expand_env_variables(lines_enum) + return lines_enum + + +def handle_requirement_line( + line: ParsedLine, + options: Optional[optparse.Values] = None, +) -> ParsedRequirement: + + # preserve for the nested code path + line_comes_from = "{} {} (line {})".format( + "-c" if line.constraint else "-r", + line.filename, + line.lineno, + ) + + assert line.is_requirement + + if line.is_editable: + # For editable requirements, we don't support per-requirement + # options, so just return the parsed requirement. + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + ) + else: + if options: + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) + + # get the options that apply to requirements + req_options = {} + for dest in SUPPORTED_OPTIONS_REQ_DEST: + if dest in line.opts.__dict__ and line.opts.__dict__[dest]: + req_options[dest] = line.opts.__dict__[dest] + + line_source = f"line {line.lineno} of {line.filename}" + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) + + +def handle_option_line( + opts: Values, + filename: str, + lineno: int, + finder: Optional["PackageFinder"] = None, + options: Optional[optparse.Values] = None, + session: Optional[PipSession] = None, +) -> None: + + if options: + # percolate options upward + if opts.require_hashes: + options.require_hashes = opts.require_hashes + if opts.features_enabled: + options.features_enabled.extend( + f for f in opts.features_enabled if f not in options.features_enabled + ) + + # set finder options + if finder: + find_links = finder.find_links + index_urls = finder.index_urls + if opts.index_url: + index_urls = [opts.index_url] + if opts.no_index is True: + index_urls = [] + if opts.extra_index_urls: + index_urls.extend(opts.extra_index_urls) + if opts.find_links: + # FIXME: it would be nice to keep track of the source + # of the find_links: support a find-links local path + # relative to a requirements file. 
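+ # e.g. for a (hypothetical) /work/reqs.txt containing
+ # --find-links wheels/
+ # "wheels/" resolves to /work/wheels when that directory exists,
+ # and is otherwise passed through unchanged.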
+ value = opts.find_links[0]
+ req_dir = os.path.dirname(os.path.abspath(filename))
+ relative_to_reqs_file = os.path.join(req_dir, value)
+ if os.path.exists(relative_to_reqs_file):
+ value = relative_to_reqs_file
+ find_links.append(value)
+
+ if session:
+ # We need to update the auth URLs in the session
+ session.update_index_urls(index_urls)
+
+ search_scope = SearchScope(
+ find_links=find_links,
+ index_urls=index_urls,
+ )
+ finder.search_scope = search_scope
+
+ if opts.pre:
+ finder.set_allow_all_prereleases()
+
+ if opts.prefer_binary:
+ finder.set_prefer_binary()
+
+ if session:
+ for host in opts.trusted_hosts or []:
+ source = f"line {lineno} of {filename}"
+ session.add_trusted_host(host, source=source)
+
+
+def handle_line(
+ line: ParsedLine,
+ options: Optional[optparse.Values] = None,
+ finder: Optional["PackageFinder"] = None,
+ session: Optional[PipSession] = None,
+) -> Optional[ParsedRequirement]:
+ """Handle a single parsed requirements line; this can result in
+ creating/yielding requirements, or updating the finder.
+
+ :param line: The parsed line to be processed.
+ :param options: CLI options.
+ :param finder: The finder - updated by non-requirement lines.
+ :param session: The session - updated by non-requirement lines.
+
+ Returns a ParsedRequirement object if the line is a requirement line,
+ otherwise returns None.
+
+ For lines that contain requirements, the only options that have an effect
+ are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+ requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+ ignored.
+
+ For lines that do not contain requirements, the only options that have an
+ effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+ be present, but are ignored. These lines may contain multiple options
+ (although our docs imply only one is supported), and all are parsed and
+ affect the finder.
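+
+ For example, a (hypothetical) option-only line such as
+ "--extra-index-url https://example.org/simple" yields no requirement and
+ updates the finder's search scope instead.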
+ """ + + if line.is_requirement: + parsed_req = handle_requirement_line(line, options) + return parsed_req + else: + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None + + +class RequirementsFileParser: + def __init__( + self, + session: PipSession, + line_parser: LineParser, + ) -> None: + self._session = session + self._line_parser = line_parser + + def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + """Parse a given file, yielding parsed lines.""" + yield from self._parse_and_recurse(filename, constraint) + + def _parse_and_recurse( + self, filename: str, constraint: bool + ) -> Iterator[ParsedLine]: + for line in self._parse_file(filename, constraint): + if not line.is_requirement and ( + line.opts.requirements or line.opts.constraints + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib.parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), + req_path, + ) + + yield from self._parse_and_recurse(req_path, nested_constraint) + else: + yield line + + def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + _, content = get_file_content(filename, self._session) + + lines_enum = preprocess(content) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = f"Invalid requirement: {line}\n{e.msg}" + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser: + def parse_line(line: str) -> Tuple[str, Values]: + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + + opts, _ = parser.parse_args(shlex.split(options_str), defaults) + + return args_str, opts + + return parse_line + + +def break_args_options(line: str) -> Tuple[str, str]: + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. + """ + tokens = line.split(" ") + args = [] + options = tokens[:] + for token in tokens: + if token.startswith("-") or token.startswith("--"): + break + else: + args.append(token) + options.pop(0) + return " ".join(args), " ".join(options) + + +class OptionParsingError(Exception): + def __init__(self, msg: str) -> None: + self.msg = msg + + +def build_parser() -> optparse.OptionParser: + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. 
+ def parser_exit(self: Any, msg: str) -> "NoReturn":
+ raise OptionParsingError(msg)
+
+ # NOTE: mypy disallows assigning to a method
+ # https://github.com/python/mypy/issues/2427
+ parser.exit = parser_exit # type: ignore
+
+ return parser
+
+
+def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
+ """Joins a line ending in '\' with the next line (except when the line
+ itself is a comment). The joined line takes on the index of the first line.
+ """
+ primary_line_number = None
+ new_line: List[str] = []
+ for line_number, line in lines_enum:
+ if not line.endswith("\\") or COMMENT_RE.match(line):
+ if COMMENT_RE.match(line):
+ # this ensures comments are always matched later
+ line = " " + line
+ if new_line:
+ new_line.append(line)
+ assert primary_line_number is not None
+ yield primary_line_number, "".join(new_line)
+ new_line = []
+ else:
+ yield line_number, line
+ else:
+ if not new_line:
+ primary_line_number = line_number
+ new_line.append(line.strip("\\"))
+
+ # last line contains \
+ if new_line:
+ assert primary_line_number is not None
+ yield primary_line_number, "".join(new_line)
+
+ # TODO: handle space after '\'.
+
+
+def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
+ """
+ Strips comments and filters empty lines.
+ """
+ for line_number, line in lines_enum:
+ line = COMMENT_RE.sub("", line)
+ line = line.strip()
+ if line:
+ yield line_number, line
+
+
+def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
+ """Replace all environment variables that can be retrieved via `os.getenv`.
+
+ The only allowed format for environment variables defined in the
+ requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+ 1. Strings that contain a `$` aren't accidentally (partially) expanded.
+ 2. Consistent behaviour across platforms for requirement files.
+
+ These points are the result of a discussion on the `github pull
+ request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+ Valid characters in variable names follow the `POSIX standard
+ <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
+ to uppercase letters, digits and the `_` (underscore).
+ """
+ for line_number, line in lines_enum:
+ for env_var, var_name in ENV_VAR_RE.findall(line):
+ value = os.getenv(var_name)
+ if not value:
+ continue
+
+ line = line.replace(env_var, value)
+
+ yield line_number, line
+
+
+def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
+ """Gets the content of a file; it may be a filename, file: URL, or
+ http: URL. Returns (location, content). Content is unicode.
+ Respects # -*- coding: declarations on the retrieved files.
+
+ :param url: File path or url.
+ :param session: PipSession instance.
+ """
+ scheme = get_url_scheme(url)
+
+ # Pip has special support for file:// URLs (LocalFSAdapter).
+ if scheme in ["http", "https", "file"]:
+ resp = session.get(url)
+ raise_for_status(resp)
+ return resp.url, resp.text
+
+ # Assume this is a bare path.
+ try:
+ with open(url, "rb") as f:
+ content = auto_decode(f.read())
+ except OSError as exc:
+ raise InstallationError(f"Could not open requirements file: {exc}")
+ return url, content diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/req_install.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 000000000..02dbda194 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,858 @@ +# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False + +import functools +import logging +import os +import shutil +import sys +import uuid +import zipfile +from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union + +from pip._vendor.packaging.markers import Marker +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError, LegacyInstallFailure +from pip._internal.locations import get_scheme +from pip._internal.metadata import ( + BaseDistribution, + get_default_environment, + get_directory_distribution, +) +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_editable import generate_editable_metadata +from pip._internal.operations.build.metadata_legacy import ( + generate_metadata as generate_metadata_legacy, +) +from pip._internal.operations.install.editable_legacy import ( + install_editable as install_editable_legacy, +) +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.direct_url_helpers import ( + direct_url_for_editable, + direct_url_from_link, +) +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + redact_auth_from_url, +) +from pip._internal.utils.packaging import safe_extra +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + + +class InstallRequirement: + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. + """ + + def __init__( + self, + req: Optional[Requirement], + comes_from: Optional[Union[str, "InstallRequirement"]], + editable: bool = False, + link: Optional[Link] = None, + markers: Optional[Marker] = None, + use_pep517: Optional[bool] = None, + isolated: bool = False, + install_options: Optional[List[str]] = None, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, + constraint: bool = False, + extras: Collection[str] = (), + user_supplied: bool = False, + permit_editable_wheels: bool = False, + ) -> None: + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + self.editable = editable + self.permit_editable_wheels = permit_editable_wheels + self.legacy_install_reason: Optional[int] = None + + # source_dir is the local directory where the linked requirement is + # located, or unpacked. 
In case unpacking is needed, creating and
+ # populating source_dir is done by the RequirementPreparer. Note this
+ # is not necessarily the directory where pyproject.toml or setup.py is
+ # located - that one is obtained via unpacked_source_directory.
+ self.source_dir: Optional[str] = None
+ if self.editable:
+ assert link
+ if link.is_file:
+ self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
+
+ if link is None and req and req.url:
+ # PEP 508 URL requirement
+ link = Link(req.url)
+ self.link = self.original_link = link
+ self.original_link_is_in_wheel_cache = False
+
+ # Path to any downloaded or already-existing package.
+ self.local_file_path: Optional[str] = None
+ if self.link and self.link.is_file:
+ self.local_file_path = self.link.file_path
+
+ if extras:
+ self.extras = extras
+ elif req:
+ self.extras = {safe_extra(extra) for extra in req.extras}
+ else:
+ self.extras = set()
+ if markers is None and req:
+ markers = req.marker
+ self.markers = markers
+
+ # This holds the Distribution object if this requirement is already installed.
+ self.satisfied_by: Optional[BaseDistribution] = None
+ # Whether the installation process should try to uninstall an existing
+ # distribution before installing this requirement.
+ self.should_reinstall = False
+ # Temporary build location
+ self._temp_build_dir: Optional[TempDirectory] = None
+ # Set to True after successful installation
+ self.install_succeeded: Optional[bool] = None
+ # Supplied options
+ self.install_options = install_options if install_options else []
+ self.global_options = global_options if global_options else []
+ self.hash_options = hash_options if hash_options else {}
+ # Set to True after successful preparation of this requirement
+ self.prepared = False
+ # User-supplied requirements are explicitly requested for installation
+ # by the user via CLI arguments or requirements files, as opposed to,
+ # e.g. dependencies, extras or constraints.
+ self.user_supplied = user_supplied
+
+ self.isolated = isolated
+ self.build_env: BuildEnvironment = NoOpBuildEnvironment()
+
+ # For PEP 517, the directory where we request the project metadata
+ # gets stored. We need this to pass to build_wheel, so the backend
+ # can ensure that the wheel matches the metadata (see the PEP for
+ # details).
+ self.metadata_directory: Optional[str] = None
+
+ # The static build requirements (from pyproject.toml)
+ self.pyproject_requires: Optional[List[str]] = None
+
+ # Build requirements that we will check are available
+ self.requirements_to_check: List[str] = []
+
+ # The PEP 517 backend we should use to build the project
+ self.pep517_backend: Optional[Pep517HookCaller] = None
+
+ # Are we using PEP 517 for this requirement?
+ # After pyproject.toml has been loaded, the only valid values are True
+ # and False. Before loading, None is valid (meaning "use the default").
+ # Setting an explicit value before loading pyproject.toml is supported,
+ # but after loading this flag should be treated as read only.
+ self.use_pep517 = use_pep517 + + # This requirement needs more preparation before it can be built + self.needs_more_preparation = False + + def __str__(self) -> str: + if self.req: + s = str(self.req) + if self.link: + s += " from {}".format(redact_auth_from_url(self.link.url)) + elif self.link: + s = redact_auth_from_url(self.link.url) + else: + s = "" + if self.satisfied_by is not None: + s += " in {}".format(display_path(self.satisfied_by.location)) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from: Optional[str] = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += f" (from {comes_from})" + return s + + def __repr__(self) -> str: + return "<{} object: {} editable={!r}>".format( + self.__class__.__name__, str(self), self.editable + ) + + def format_debug(self) -> str: + """An un-tested helper for getting state, for debugging.""" + attributes = vars(self) + names = sorted(attributes) + + state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) + return "<{name} object: {{{state}}}>".format( + name=self.__class__.__name__, + state=", ".join(state), + ) + + # Things that are valid for all kinds of requirements? + @property + def name(self) -> Optional[str]: + if self.req is None: + return None + return self.req.name + + @functools.lru_cache() # use cached_property in python 3.8+ + def supports_pyproject_editable(self) -> bool: + if not self.use_pep517: + return False + assert self.pep517_backend + with self.build_env: + runner = runner_with_spinner_message( + "Checking if build backend supports build_editable" + ) + with self.pep517_backend.subprocess_runner(runner): + return "build_editable" in self.pep517_backend._supported_features() + + @property + def specifier(self) -> SpecifierSet: + return self.req.specifier + + @property + def is_pinned(self) -> bool: + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + specifiers = self.specifier + return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} + + def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ("",) + if self.markers is not None: + return any( + self.markers.evaluate({"extra": extra}) for extra in extras_requested + ) + else: + return True + + @property + def has_hash_options(self) -> bool: + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.hash_options) + + def hashes(self, trust_internet: bool = True) -> Hashes: + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) 
hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.hash_options.copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self) -> Optional[str]: + """Format a nice indicator to show where this "comes from" """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, str): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += "->" + comes_from + return s + + def ensure_build_location( + self, build_dir: str, autodelete: bool, parallel_builds: bool + ) -> str: + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. + self._temp_build_dir = TempDirectory( + kind=tempdir_kinds.REQ_BUILD, globally_managed=True + ) + + return self._temp_build_dir.path + + # This is the only remaining place where we manually determine the path + # for the temporary directory. It is only needed for editables where + # it is the value of the --src option. + + # When parallel builds are enabled, add a UUID to the build directory + # name so multiple builds do not interfere with each other. + dir_name: str = canonicalize_name(self.name) + if parallel_builds: + dir_name = f"{dir_name}_{uuid.uuid4().hex}" + + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug("Creating directory %s", build_dir) + os.makedirs(build_dir) + actual_build_dir = os.path.join(build_dir, dir_name) + # `None` indicates that we respect the globally-configured deletion + # settings, which is what we actually want when auto-deleting. + delete_arg = None if autodelete else False + return TempDirectory( + path=actual_build_dir, + delete=delete_arg, + kind=tempdir_kinds.REQ_BUILD, + globally_managed=True, + ).path + + def _set_requirement(self) -> None: + """Set requirement after generating metadata.""" + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join( + [ + self.metadata["Name"], + op, + self.metadata["Version"], + ] + ) + ) + + def warn_on_mismatching_name(self) -> None: + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + "Generating metadata for package %s " + "produced metadata for project name %s. Fix your " + "#egg=%s fragments.", + self.name, + metadata_name, + self.name, + ) + self.req = Requirement(metadata_name) + + def check_if_exists(self, use_user_site: bool) -> None: + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. 
+ """ + if self.req is None: + return + existing_dist = get_default_environment().get_distribution(self.req.name) + if not existing_dist: + return + + version_compatible = self.req.specifier.contains( + existing_dist.version, + prereleases=True, + ) + if not version_compatible: + self.satisfied_by = None + if use_user_site: + if existing_dist.in_usersite: + self.should_reinstall = True + elif running_under_virtualenv() and existing_dist.in_site_packages: + raise InstallationError( + f"Will not install to the user site because it will " + f"lack sys.path precedence to {existing_dist.raw_name} " + f"in {existing_dist.location}" + ) + else: + self.should_reinstall = True + else: + if self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + else: + self.satisfied_by = existing_dist + + # Things valid for wheels + @property + def is_wheel(self) -> bool: + if not self.link: + return False + return self.link.is_wheel + + # Things valid for sdists + @property + def unpacked_source_directory(self) -> str: + return os.path.join( + self.source_dir, self.link and self.link.subdirectory_fragment or "" + ) + + @property + def setup_py_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_py = os.path.join(self.unpacked_source_directory, "setup.py") + + return setup_py + + @property + def setup_cfg_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") + + return setup_cfg + + @property + def pyproject_toml_path(self) -> str: + assert self.source_dir, f"No source dir for {self}" + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self) -> None: + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. In particular, the + use_pep517 attribute can be used to determine whether we should + follow the PEP 517 or legacy (setup.py) code path. + """ + pyproject_toml_data = load_pyproject_toml( + self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) + ) + + if pyproject_toml_data is None: + self.use_pep517 = False + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, + backend, + backend_path=backend_path, + ) + + def isolated_editable_sanity_check(self) -> None: + """Check that an editable requirement if valid for use with PEP 517/518. + + This verifies that an editable that has a pyproject.toml either supports PEP 660 + or as a setup.py or a setup.cfg + """ + if ( + self.editable + and self.use_pep517 + and not self.supports_pyproject_editable() + and not os.path.isfile(self.setup_py_path) + and not os.path.isfile(self.setup_cfg_path) + ): + raise InstallationError( + f"Project {self} has a 'pyproject.toml' and its build " + f"backend is missing the 'build_editable' hook. Since it does not " + f"have a 'setup.py' nor a 'setup.cfg', " + f"it cannot be installed in editable mode. " + f"Consider using a build backend that supports PEP 660." + ) + + def prepare_metadata(self) -> None: + """Ensure that project metadata is available. + + Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. 
+ """ + assert self.source_dir + details = self.name or f"from {self.link}" + + if self.use_pep517: + assert self.pep517_backend is not None + if ( + self.editable + and self.permit_editable_wheels + and self.supports_pyproject_editable() + ): + self.metadata_directory = generate_editable_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + else: + self.metadata_directory = generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + details=details, + ) + else: + self.metadata_directory = generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + isolated=self.isolated, + details=details, + ) + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self) -> Any: + if not hasattr(self, "_metadata"): + self._metadata = self.get_dist().metadata + + return self._metadata + + def get_dist(self) -> BaseDistribution: + return get_directory_distribution(self.metadata_directory) + + def assert_source_matches_version(self) -> None: + assert self.source_dir + version = self.metadata["version"] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + "Requested %s, but installing version %s", + self, + version, + ) + else: + logger.debug( + "Source in %s has version %s, which satisfies requirement %s", + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir( + self, + parent_dir: str, + autodelete: bool = False, + parallel_builds: bool = False, + ) -> None: + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location( + parent_dir, + autodelete=autodelete, + parallel_builds=parallel_builds, + ) + + # For editable installations + def update_editable(self) -> None: + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == "file": + # Static paths don't get updated + return + vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) + # Editable requirements are validated in Requirement constructors. + # So here, if it's neither a path nor a valid VCS URL, it's a bug. + assert vcs_backend, f"Unsupported VCS URL {self.link.url}" + hidden_url = hide_url(self.link.url) + vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0) + + # Top-level Actions + def uninstall( + self, auto_confirm: bool = False, verbose: bool = False + ) -> Optional[UninstallPathSet]: + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. 
+ + """ + assert self.req + dist = get_default_environment().get_distribution(self.req.name) + if not dist: + logger.warning("Skipping %s as it is not installed.", self.name) + return None + logger.info("Found existing installation: %s", dist) + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: + def _clean_zip_name(name: str, prefix: str) -> str: + assert name.startswith( + prefix + os.path.sep + ), f"name {name!r} doesn't start with prefix {prefix!r}" + name = name[len(prefix) + 1 :] + name = name.replace(os.path.sep, "/") + return name + + path = os.path.join(parentdir, path) + name = _clean_zip_name(path, rootdir) + return self.name + "/" + name + + def archive(self, build_dir: Optional[str]) -> None: + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ + assert self.source_dir + if build_dir is None: + return + + create_archive = True + archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) + archive_path = os.path.join(build_dir, archive_name) + + if os.path.exists(archive_path): + response = ask_path_exists( + "The file {} exists. (i)gnore, (w)ipe, " + "(b)ackup, (a)bort ".format(display_path(archive_path)), + ("i", "w", "b", "a"), + ) + if response == "i": + create_archive = False + elif response == "w": + logger.warning("Deleting %s", display_path(archive_path)) + os.remove(archive_path) + elif response == "b": + dest_file = backup_dir(archive_path) + logger.warning( + "Backing up %s to %s", + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == "a": + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, + "w", + zipfile.ZIP_DEFLATED, + allowZip64=True, + ) + with zip_output: + dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) + for dirpath, dirnames, filenames in os.walk(dir): + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, + parentdir=dirpath, + rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + "/") + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, "") + for filename in filenames: + file_arcname = self._get_archive_name( + filename, + parentdir=dirpath, + rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info("Saved %s", display_path(archive_path)) + + def install( + self, + install_options: List[str], + global_options: Optional[Sequence[str]] = None, + root: Optional[str] = None, + home: Optional[str] = None, + prefix: Optional[str] = None, + warn_script_location: bool = True, + use_user_site: bool = False, + pycompile: bool = True, + ) -> None: + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + global_options = global_options if global_options is not None else [] + if self.editable and not self.is_wheel: + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + ) + self.install_succeeded = True + return + + if self.is_wheel: + assert self.local_file_path + 
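# Hedged note: the direct_url computed below becomes the PEP 610
+ # `direct_url.json` metadata that install_wheel() records; these
+ # branches only produce it for editables and for requirements whose
+ # original link was a direct (non-index) URL.
+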
direct_url = None + if self.editable: + direct_url = direct_url_for_editable(self.unpacked_source_directory) + elif self.original_link: + direct_url = direct_url_from_link( + self.original_link, + self.source_dir, + self.original_link_is_in_wheel_cache, + ) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=direct_url, + requested=self.user_supplied, + ) + self.install_succeeded = True + return + + # TODO: Why don't we do this for editable installs? + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. + global_options = list(global_options) + self.global_options + install_options = list(install_options) + self.install_options + + try: + success = install_legacy( + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + req_name=self.name, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + req_description=str(self.req), + ) + except LegacyInstallFailure as exc: + self.install_succeeded = False + raise exc + except Exception: + self.install_succeeded = True + raise + + self.install_succeeded = success + + if success and self.legacy_install_reason == 8368: + deprecated( + reason=( + "{} was installed using the legacy 'setup.py install' " + "method, because a wheel could not be built for it.".format( + self.name + ) + ), + replacement="to fix the wheel build issue reported above", + gone_in=None, + issue=8368, + ) + + +def check_invalid_constraint_type(req: InstallRequirement) -> str: + + # Check for unsupported forms + problem = "" + if not req.name: + problem = "Unnamed requirements are not allowed as constraints" + elif req.editable: + problem = "Editable requirements are not allowed as constraints" + elif req.extras: + problem = "Constraints cannot have extras" + + if problem: + deprecated( + reason=( + "Constraints are only allowed to take the form of a package " + "name and a version specifier. Other forms were originally " + "permitted as an accident of the implementation, but were " + "undocumented. The new implementation of the resolver no " + "longer supports these forms." 
+ ), + replacement="replacing the constraint with a requirement", + # No plan yet for when the new resolver becomes default + gone_in=None, + issue=8210, + ) + + return problem diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/req_set.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 000000000..6626c37e2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,189 @@ +import logging +from collections import OrderedDict +from typing import Dict, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InstallationError +from pip._internal.models.wheel import Wheel +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils import compatibility_tags + +logger = logging.getLogger(__name__) + + +class RequirementSet: + def __init__(self, check_supported_wheels: bool = True) -> None: + """Create a RequirementSet.""" + + self.requirements: Dict[str, InstallRequirement] = OrderedDict() + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements: List[InstallRequirement] = [] + + def __str__(self) -> str: + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name or ""), + ) + return " ".join(str(req.req) for req in requirements) + + def __repr__(self) -> str: + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name or ""), + ) + + format_string = "<{classname} object; {count} requirement(s): {reqs}>" + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=", ".join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req: InstallRequirement) -> None: + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def add_requirement( + self, + install_req: InstallRequirement, + parent_req_name: Optional[str] = None, + extras_requested: Optional[Iterable[str]] = None, + ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, + install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. 
+ # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if self.check_supported_wheels and not wheel.supported(tags): + raise InstallationError( + "{} is not a supported wheel on this platform.".format( + wheel.filename + ) + ) + + # This next bit is really a sanity check. + assert ( + not install_req.user_supplied or parent_req_name is None + ), "a user supplied req shouldn't have a parent" + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. + if not install_req.name: + self.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req: Optional[InstallRequirement] = self.get_requirement( + install_req.name + ) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req + and install_req.req + and existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: {} (already in {}, name={!r})".format( + install_req, existing_req, install_req.name + ) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. + if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = install_req.link and not ( + existing_req.link and install_req.link.path == existing_req.link.path + ) + if does_not_satisfy_constraint: + raise InstallationError( + "Could not satisfy constraints for '{}': " + "installation from path or url cannot be " + "constrained to a version".format(install_req.name) + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. + if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple( + sorted(set(existing_req.extras) | set(install_req.extras)) + ) + logger.debug( + "Setting %s extras to: %s", + existing_req, + existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. 
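+ # Sketch of the return contract implemented by this method's branches:
+ #
+ #     [], None                     -> markers did not match; ignored
+ #     [install_req], None          -> unnamed; rescan, add after scanning
+ #     [install_req], install_req   -> new named requirement; rescan it
+ #     [], existing_req             -> duplicate already tracked; no rescan
+ #     [existing_req], existing_req -> constraint/user-supplied merge; rescan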
+ return [existing_req], existing_req + + def has_requirement(self, name: str) -> bool: + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements + and not self.requirements[project_name].constraint + ) + + def get_requirement(self, name: str) -> InstallRequirement: + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError(f"No project with the name {name!r}") + + @property + def all_requirements(self) -> List[InstallRequirement]: + return self.unnamed_requirements + list(self.requirements.values()) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py new file mode 100644 index 000000000..24d3c5303 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py @@ -0,0 +1,124 @@ +import contextlib +import hashlib +import logging +import os +from types import TracebackType +from typing import Dict, Iterator, Optional, Set, Type, Union + +from pip._internal.models.link import Link +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes: str) -> Iterator[None]: + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values: Dict[str, Union[object, str]] = {} + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. + for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker() -> Iterator["RequirementTracker"]: + root = os.environ.get("PIP_REQ_TRACKER") + with contextlib.ExitStack() as ctx: + if root is None: + root = ctx.enter_context(TempDirectory(kind="req-tracker")).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + +class RequirementTracker: + def __init__(self, root: str) -> None: + self._root = root + self._entries: Set[InstallRequirement] = set() + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self) -> "RequirementTracker": + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.cleanup() + + def _entry_path(self, link: Link) -> str: + hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req: InstallRequirement) -> None: + """Add an InstallRequirement to build tracking.""" + + assert req.link + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. 
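+ # Illustration (hypothetical URL): two concurrent builds of the same
+ # link hash to the same entry file, e.g.
+ #
+ #     link = Link("https://example.com/pkg-1.0.tar.gz")
+ #     hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+ #
+ # yields one stable file name under $PIP_REQ_TRACKER for both builds.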
+ try: + with open(entry_path) as fp: + contents = fp.read() + except FileNotFoundError: + pass + else: + message = "{} is already being built: {}".format(req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. + with open(entry_path, "w", encoding="utf-8") as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug("Added %s to build tracker %r", req, self._root) + + def remove(self, req: InstallRequirement) -> None: + """Remove an InstallRequirement from build tracking.""" + + assert req.link + # Delete the created file and the corresponding entries. + os.unlink(self._entry_path(req.link)) + self._entries.remove(req) + + logger.debug("Removed %s from build tracker %r", req, self._root) + + def cleanup(self) -> None: + for req in set(self._entries): + self.remove(req) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req: InstallRequirement) -> Iterator[None]: + self.add(req) + yield + self.remove(req) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 000000000..472090a0d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,633 @@ +import functools +import os +import sys +import sysconfig +from importlib.util import cache_from_source +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple + +from pip._internal.exceptions import UninstallationError +from pip._internal.locations import get_bin_prefix, get_bin_user +from pip._internal.metadata import BaseDistribution +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.egg_link import egg_link_path_from_location +from pip._internal.utils.logging import getLogger, indent_log +from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree +from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory + +logger = getLogger(__name__) + + +def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]: + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. + Returns the list of file names + """ + exe_name = os.path.join(bin_dir, script_name) + yield exe_name + if not WINDOWS: + return + yield f"{exe_name}.exe" + yield f"{exe_name}.exe.manifest" + if is_gui: + yield f"{exe_name}-script.pyw" + else: + yield f"{exe_name}-script.py" + + +def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]: + @functools.wraps(fn) + def unique(*args: Any, **kw: Any) -> Iterator[Any]: + seen: Set[Any] = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + + return unique + + +@_unique +def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]: + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. + + If RECORD is not found, raises UninstallationError, + with possible information from the INSTALLER file. 
+
+ https://packaging.python.org/specifications/recording-installed-packages/
+ """
+ location = dist.location
+ assert location is not None, "not installed"
+
+ entries = dist.iter_declared_entries()
+ if entries is None:
+ msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
+ installer = dist.installer
+ if not installer or installer == "pip":
+ dep = "{}=={}".format(dist.raw_name, dist.version)
+ msg += (
+ " You might be able to recover from this via: "
+ "'pip install --force-reinstall --no-deps {}'.".format(dep)
+ )
+ else:
+ msg += " Hint: The package was installed by {}.".format(installer)
+ raise UninstallationError(msg)
+
+ for entry in entries:
+ path = os.path.join(location, entry)
+ yield path
+ if path.endswith(".py"):
+ dn, fn = os.path.split(path)
+ base = fn[:-3]
+ path = os.path.join(dn, base + ".pyc")
+ yield path
+ path = os.path.join(dn, base + ".pyo")
+ yield path
+
+
+def compact(paths: Iterable[str]) -> Set[str]:
+ """Compact a path set to contain the minimal number of paths
+ necessary to contain all paths in the set. If /a/path/ and
+ /a/path/to/a/file.txt are both in the set, leave only the
+ shorter path."""
+
+ sep = os.path.sep
+ short_paths: Set[str] = set()
+ for path in sorted(paths, key=len):
+ should_skip = any(
+ path.startswith(shortpath.rstrip("*"))
+ and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+ for shortpath in short_paths
+ )
+ if not should_skip:
+ short_paths.add(path)
+ return short_paths
+
+
+def compress_for_rename(paths: Iterable[str]) -> Set[str]:
+ """Returns a set containing the paths that need to be renamed.
+
+ This set may include directories when the original sequence of paths
+ included every file on disk.
+ """
+ case_map = {os.path.normcase(p): p for p in paths}
+ remaining = set(case_map)
+ unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
+ wildcards: Set[str] = set()
+
+ def norm_join(*a: str) -> str:
+ return os.path.normcase(os.path.join(*a))
+
+ for root in unchecked:
+ if any(os.path.normcase(root).startswith(w) for w in wildcards):
+ # This directory has already been handled.
+ continue
+
+ all_files: Set[str] = set()
+ all_subdirs: Set[str] = set()
+ for dirname, subdirs, files in os.walk(root):
+ all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+ all_files.update(norm_join(root, dirname, f) for f in files)
+ # If all the files we found are in our remaining set of files to
+ # remove, then remove them from the latter set and add a wildcard
+ # for the directory.
+ if not (all_files - remaining):
+ remaining.difference_update(all_files)
+ wildcards.add(root + os.sep)
+
+ return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
+ """Returns a tuple of 2 sets of which paths to display to the user.
+
+ The first set contains paths that would be deleted. Files of a package
+ are not added and the top-level directory of the package has a '*' added
+ at the end - to signify that all its contents are removed.
+
+ The second set contains files that would have been skipped in the above
+ folders.
+ """ + + will_remove = set(paths) + will_skip = set() + + # Determine folders and files + folders = set() + files = set() + for path in will_remove: + if path.endswith(".pyc"): + continue + if path.endswith("__init__.py") or ".dist-info" in path: + folders.add(os.path.dirname(path)) + files.add(path) + + # probably this one https://github.com/python/mypy/issues/390 + _normcased_files = set(map(os.path.normcase, files)) # type: ignore + + folders = compact(folders) + + # This walks the tree using os.walk to not miss extra folders + # that might get added. + for folder in folders: + for dirpath, _, dirfiles in os.walk(folder): + for fname in dirfiles: + if fname.endswith(".pyc"): + continue + + file_ = os.path.join(dirpath, fname) + if ( + os.path.isfile(file_) + and os.path.normcase(file_) not in _normcased_files + ): + # We are skipping this file. Add it to the set. + will_skip.add(file_) + + will_remove = files | {os.path.join(folder, "*") for folder in folders} + + return will_remove, will_skip + + +class StashedUninstallPathSet: + """A set of file rename operations to stash files while + tentatively uninstalling them.""" + + def __init__(self) -> None: + # Mapping from source file root to [Adjacent]TempDirectory + # for files under that directory. + self._save_dirs: Dict[str, TempDirectory] = {} + # (old path, new path) tuples for each move that may need + # to be undone. + self._moves: List[Tuple[str, str]] = [] + + def _get_directory_stash(self, path: str) -> str: + """Stashes a directory. + + Directories are stashed adjacent to their original location if + possible, or else moved/copied into the user's temp dir.""" + + try: + save_dir: TempDirectory = AdjacentTempDirectory(path) + except OSError: + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[os.path.normcase(path)] = save_dir + + return save_dir.path + + def _get_file_stash(self, path: str) -> str: + """Stashes a file. + + If no root has been provided, one will be created for the directory + in the user's temp directory.""" + path = os.path.normcase(path) + head, old_head = os.path.dirname(path), None + save_dir = None + + while head != old_head: + try: + save_dir = self._save_dirs[head] + break + except KeyError: + pass + head, old_head = os.path.dirname(head), head + else: + # Did not find any suitable root + head = os.path.dirname(path) + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[head] = save_dir + + relpath = os.path.relpath(path, head) + if relpath and relpath != os.path.curdir: + return os.path.join(save_dir.path, relpath) + return save_dir.path + + def stash(self, path: str) -> str: + """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. + """ + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: + new_path = self._get_directory_stash(path) + else: + new_path = self._get_file_stash(path) + + self._moves.append((path, new_path)) + if path_is_dir and os.path.isdir(new_path): + # If we're moving a directory, we need to + # remove the destination first or else it will be + # moved to inside the existing directory. + # We just created new_path ourselves, so it will + # be removable. 
+ os.rmdir(new_path) + renames(path, new_path) + return new_path + + def commit(self) -> None: + """Commits the uninstall by removing stashed files.""" + for _, save_dir in self._save_dirs.items(): + save_dir.cleanup() + self._moves = [] + self._save_dirs = {} + + def rollback(self) -> None: + """Undoes the uninstall by moving stashed files back.""" + for p in self._moves: + logger.info("Moving to %s\n from %s", *p) + + for new_path, path in self._moves: + try: + logger.debug("Replacing %s from %s", new_path, path) + if os.path.isfile(new_path) or os.path.islink(new_path): + os.unlink(new_path) + elif os.path.isdir(new_path): + rmtree(new_path) + renames(path, new_path) + except OSError as ex: + logger.error("Failed to restore %s", new_path) + logger.debug("Exception: %s", ex) + + self.commit() + + @property + def can_rollback(self) -> bool: + return bool(self._moves) + + +class UninstallPathSet: + """A set of file paths to be removed in the uninstallation of a + requirement.""" + + def __init__(self, dist: BaseDistribution) -> None: + self._paths: Set[str] = set() + self._refuse: Set[str] = set() + self._pth: Dict[str, UninstallPthEntries] = {} + self._dist = dist + self._moved_paths = StashedUninstallPathSet() + + def _permitted(self, path: str) -> bool: + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def add(self, path: str) -> None: + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(normalize_path(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self._paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == ".py": + self.add(cache_from_source(path)) + + def add_pth(self, pth_file: str, entry: str) -> None: + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self._pth: + self._pth[pth_file] = UninstallPthEntries(pth_file) + self._pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None: + """Remove paths in ``self._paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self._paths: + logger.info( + "Can't uninstall '%s'. 
No files were found to uninstall.", + self._dist.raw_name, + ) + return + + dist_name_version = f"{self._dist.raw_name}-{self._dist.version}" + logger.info("Uninstalling %s:", dist_name_version) + + with indent_log(): + if auto_confirm or self._allowed_to_proceed(verbose): + moved = self._moved_paths + + for_rename = compress_for_rename(self._paths) + + for path in sorted(compact(for_rename)): + moved.stash(path) + logger.verbose("Removing file or directory %s", path) + + for pth in self._pth.values(): + pth.remove() + + logger.info("Successfully uninstalled %s", dist_name_version) + + def _allowed_to_proceed(self, verbose: bool) -> bool: + """Display which files would be deleted and prompt for confirmation""" + + def _display(msg: str, paths: Iterable[str]) -> None: + if not paths: + return + + logger.info(msg) + with indent_log(): + for path in sorted(compact(paths)): + logger.info(path) + + if not verbose: + will_remove, will_skip = compress_for_output_listing(self._paths) + else: + # In verbose mode, display all the files that are going to be + # deleted. + will_remove = set(self._paths) + will_skip = set() + + _display("Would remove:", will_remove) + _display("Would not remove (might be manually added):", will_skip) + _display("Would not remove (outside of prefix):", self._refuse) + if verbose: + _display("Will actually move:", compress_for_rename(self._paths)) + + return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n" + + def rollback(self) -> None: + """Rollback the changes previously made by remove().""" + if not self._moved_paths.can_rollback: + logger.error( + "Can't roll back %s; was not uninstalled", + self._dist.raw_name, + ) + return + logger.info("Rolling back uninstall of %s", self._dist.raw_name) + self._moved_paths.rollback() + for pth in self._pth.values(): + pth.rollback() + + def commit(self) -> None: + """Remove temporary save dir: rollback will no longer be possible.""" + self._moved_paths.commit() + + @classmethod + def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": + dist_location = dist.location + info_location = dist.info_location + if dist_location is None: + logger.info( + "Not uninstalling %s since it is not installed", + dist.canonical_name, + ) + return cls(dist) + + normalized_dist_location = normalize_path(dist_location) + if not dist.local: + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.canonical_name, + normalized_dist_location, + sys.prefix, + ) + return cls(dist) + + if normalized_dist_location in { + p + for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")} + if p + }: + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.canonical_name, + normalized_dist_location, + ) + return cls(dist) + + paths_to_remove = cls(dist) + develop_egg_link = egg_link_path_from_location(dist.raw_name) + + # Distribution is installed with metadata in a "flat" .egg-info + # directory. This means it is not a modern .dist-info installation, an + # egg, or legacy editable. + setuptools_flat_installation = ( + dist.installed_with_setuptools_egg_info + and info_location is not None + and os.path.exists(info_location) + # If dist is editable and the location points to a ``.egg-info``, + # we are in fact in the legacy editable case. 
+ and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
+ )
+
+ # The order of these uninstall cases matters: given two installs of the
+ # same package, pip needs to uninstall the currently detected version.
+ if setuptools_flat_installation:
+ if info_location is not None:
+ paths_to_remove.add(info_location)
+ installed_files = dist.iter_declared_entries()
+ if installed_files is not None:
+ for installed_file in installed_files:
+ paths_to_remove.add(os.path.join(dist_location, installed_file))
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
+ elif dist.is_file("top_level.txt"):
+ try:
+ namespace_packages = dist.read_text("namespace_packages.txt")
+ except FileNotFoundError:
+ namespaces = []
+ else:
+ namespaces = namespace_packages.splitlines(keepends=False)
+ for top_level_pkg in [
+ p
+ for p in dist.read_text("top_level.txt").splitlines()
+ if p and p not in namespaces
+ ]:
+ path = os.path.join(dist_location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(f"{path}.py")
+ paths_to_remove.add(f"{path}.pyc")
+ paths_to_remove.add(f"{path}.pyo")
+
+ elif dist.installed_by_distutils:
+ raise UninstallationError(
+ "Cannot uninstall {!r}. It is a distutils installed project "
+ "and thus we cannot accurately determine which files belong "
+ "to it which would lead to only a partial uninstall.".format(
+ dist.raw_name,
+ )
+ )
+
+ elif dist.installed_as_egg:
+ # package installed by easy_install
+ # We cannot match on dist.egg_name because it can slightly vary
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+ paths_to_remove.add(dist_location)
+ easy_install_egg = os.path.split(dist_location)[1]
+ easy_install_pth = os.path.join(
+ os.path.dirname(dist_location),
+ "easy-install.pth",
+ )
+ paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+ elif dist.installed_with_dist_info:
+ for path in uninstallation_paths(dist):
+ paths_to_remove.add(path)
+
+ elif develop_egg_link:
+ # PEP 660 modern editable is handled in the ``.dist-info`` case
+ # above, so this only covers the setuptools-style editable.
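+ # Hedged illustration: a setuptools .egg-link conventionally holds the
+ # project directory on its first line (hypothetical path shown):
+ #
+ #     /home/user/src/mypkg
+ #     .
+ #
+ # so only that first line is compared against dist_location below.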
+ with open(develop_egg_link) as fh:
+ link_pointer = os.path.normcase(fh.readline().strip())
+ assert link_pointer == dist_location, (
+ f"Egg-link {link_pointer} does not match installed location of "
+ f"{dist.raw_name} (at {dist_location})"
+ )
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(
+ os.path.dirname(develop_egg_link), "easy-install.pth"
+ )
+ paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+ else:
+ logger.debug(
+ "Not sure how to uninstall: %s - Check: %s",
+ dist,
+ dist_location,
+ )
+
+ if dist.in_usersite:
+ bin_dir = get_bin_user()
+ else:
+ bin_dir = get_bin_prefix()
+
+ # find scripts installed via distutils' scripts= argument
+ try:
+ for script in dist.iterdir("scripts"):
+ paths_to_remove.add(os.path.join(bin_dir, script.name))
+ if WINDOWS:
+ paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat"))
+ except (FileNotFoundError, NotADirectoryError):
+ pass
+
+ # find console_scripts and gui_scripts
+ def iter_scripts_to_remove(
+ dist: BaseDistribution,
+ bin_dir: str,
+ ) -> Iterator[str]:
+ for entry_point in dist.iter_entry_points():
+ if entry_point.group == "console_scripts":
+ yield from _script_names(bin_dir, entry_point.name, False)
+ elif entry_point.group == "gui_scripts":
+ yield from _script_names(bin_dir, entry_point.name, True)
+
+ for s in iter_scripts_to_remove(dist, bin_dir):
+ paths_to_remove.add(s)
+
+ return paths_to_remove
+
+
+class UninstallPthEntries:
+ def __init__(self, pth_file: str) -> None:
+ self.file = pth_file
+ self.entries: Set[str] = set()
+ self._saved_lines: Optional[List[bytes]] = None
+
+ def add(self, entry: str) -> None:
+ entry = os.path.normcase(entry)
+ # On Windows, os.path.normcase converts the entry to use
+ # backslashes. This is correct for entries that describe absolute
+ # paths outside of site-packages, but all the others use forward
+ # slashes.
+ # os.path.splitdrive is used instead of os.path.isabs because isabs
+ # treats non-absolute paths with drive letter markings like c:foo\bar
+ # as absolute paths. It also does not recognize UNC paths if they don't
+ # have more than "\\server\share". Valid examples: "\\server\share\" or
+ # "\\server\share\folder".
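+ # Illustration of the splitdrive() results relied on below (Windows):
+ #
+ #     os.path.splitdrive(r"c:\pkgs\foo")[0]       # "c:" -> kept as-is
+ #     os.path.splitdrive(r"\\server\share\x")[0]  # UNC  -> kept as-is
+ #     os.path.splitdrive("foo/bar")[0]            # ""   -> slashes flipped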
+ if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace("\\", "/") + self.entries.add(entry) + + def remove(self) -> None: + logger.verbose("Removing pth entries from %s:", self.file) + + # If the file doesn't exist, log a warning and return + if not os.path.isfile(self.file): + logger.warning("Cannot remove entries from nonexistent file %s", self.file) + return + with open(self.file, "rb") as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b"\r\n" in line for line in lines): + endline = "\r\n" + else: + endline = "\n" + # handle missing trailing newline + if lines and not lines[-1].endswith(endline.encode("utf-8")): + lines[-1] = lines[-1] + endline.encode("utf-8") + for entry in self.entries: + try: + logger.verbose("Removing entry: %s", entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, "wb") as fh: + fh.writelines(lines) + + def rollback(self) -> bool: + if self._saved_lines is None: + logger.error("Cannot roll back changes to %s, none were made", self.file) + return False + logger.debug("Rolling %s back to previous state", self.file) + with open(self.file, "wb") as fh: + fh.writelines(self._saved_lines) + return True diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..e6119d99f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc new file mode 100644 index 000000000..775f6a9e7 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/base.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/base.py new file mode 100644 index 000000000..42dade18c --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/base.py @@ -0,0 +1,20 @@ +from typing import Callable, List, Optional + +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet + +InstallRequirementProvider = Callable[ + [str, Optional[InstallRequirement]], InstallRequirement +] + + +class BaseResolver: + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + raise NotImplementedError() + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + raise NotImplementedError() diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..5a5ccb287 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc new file mode 100644 index 000000000..c2a0777ed Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py new file mode 100644 index 000000000..8c149d437 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py @@ -0,0 +1,467 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import sys +from collections import defaultdict +from itertools import chain +from typing import DefaultDict, Iterable, List, Optional, Set, Tuple + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.requirements import Requirement + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + NoneMetadataError, + UnsupportedPythonVersion, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.packaging import check_requires_python + +logger = logging.getLogger(__name__) + +DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] + + +def _check_dist_requires_python( + dist: BaseDistribution, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> None: + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. 
+ """ + # This idiosyncratically converts the SpecifierSet to str and let + # check_requires_python then parse it again into SpecifierSet. But this + # is the legacy resolver so I'm just not going to bother refactoring. + try: + requires_python = str(dist.requires_python) + except FileNotFoundError as e: + raise NoneMetadataError(dist, str(e)) + try: + is_compatible = check_requires_python( + requires_python, + version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc + ) + return + + if is_compatible: + return + + version = ".".join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + "Ignoring failed Requires-Python check for package %r: %s not in %r", + dist.raw_name, + version, + requires_python, + ) + return + + raise UnsupportedPythonVersion( + "Package {!r} requires a different Python: {} not in {!r}".format( + dist.raw_name, version, requires_python + ) + ) + + +class Resolver(BaseResolver): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + + self.preparer = preparer + self.finder = finder + self.wheel_cache = wheel_cache + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + self._make_install_req = make_install_req + + self._discovered_dependencies: DiscoveredDependencies = defaultdict(list) + + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for req in root_reqs: + if req.constraint: + check_invalid_constraint_type(req) + requirement_set.add_requirement(req) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # _populate_link() needs to be called before we can make decisions + # based on link type. 
+ discovered_reqs: List[InstallRequirement] = [] + hash_errors = HashErrors() + for req in chain(requirement_set.all_requirements, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + return requirement_set + + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.user_supplied or req.constraint + + def _set_req_to_reinstall(self, req: InstallRequirement) -> None: + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or req.satisfied_by.in_usersite: + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed( + self, req_to_install: InstallRequirement + ) -> Optional[str]: + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return "already satisfied, skipping upgrade" + return "already satisfied" + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return "already up-to-date" + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? + pass + + self._set_req_to_reinstall(req_to_install) + return None + + def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]: + upgrade = self._is_upgrade_allowed(req) + best_candidate = self.finder.find_requirement(req, upgrade) + if not best_candidate: + return None + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or "" + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. 
+ "The candidate selected for download or install is a " + "yanked version: {candidate}\n" + "Reason for being yanked: {reason}" + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return link + + def _populate_link(self, req: InstallRequirement) -> None: + """Ensure that if a link can be found for this, that it is found. + + Note that req.link may still be None - if the requirement is already + installed and not needed to be upgraded based on the return value of + _is_upgrade_allowed(). + + If preparer.require_hashes is True, don't use the wheel cache, because + cached wheels, always built locally, have different hashes than the + files downloaded from the index server and thus throw false hash + mismatches. Furthermore, cached wheels at present have undeterministic + contents due to file modification times. + """ + if req.link is None: + req.link = self._find_requirement_link(req) + + if self.wheel_cache is None or self.preparer.require_hashes: + return + cache_entry = self.wheel_cache.get_cache_entry( + link=req.link, + package_name=req.name, + supported_tags=get_supported(), + ) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + if req.link is req.original_link and cache_entry.persistent: + req.original_link_is_in_wheel_cache = True + req.link = cache_entry.link + + def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution: + """Takes a InstallRequirement and returns a single AbstractDist \ + representing a prepared variant of the same. + """ + if req.editable: + return self.preparer.prepare_editable_requirement(req) + + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. + assert req.satisfied_by is None + skip_reason = self._check_skip_installed(req) + + if req.satisfied_by: + return self.preparer.prepare_installed_requirement(req, skip_reason) + + # We eagerly populate the link, since that's our "legacy" behavior. + self._populate_link(req) + dist = self.preparer.prepare_linked_requirement(req) + + # NOTE + # The following portion is for determining if a certain package is + # going to be re-installed/upgraded or not and reporting to the user. + # This should probably get cleaned up in a future refactor. + + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" + or self.force_reinstall + or self.ignore_installed + or req.link.scheme == "file" + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + "Requirement already satisfied (use --upgrade to upgrade): %s", + req, + ) + return dist + + def _resolve_one( + self, + requirement_set: RequirementSet, + req_to_install: InstallRequirement, + ) -> List[InstallRequirement]: + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # Parse and return dependencies + dist = self._get_dist_for(req_to_install) + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. 
+ _check_dist_requires_python(
+ dist,
+ version_info=self._py_version_info,
+ ignore_requires_python=self.ignore_requires_python,
+ )
+
+ more_reqs: List[InstallRequirement] = []
+
+ def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
+ # This idiosyncratically converts the Requirement to str and lets
+ # make_install_req parse it back into a Requirement. But this is
+ # the legacy resolver, so I'm just not going to bother refactoring.
+ sub_install_req = self._make_install_req(str(subreq), req_to_install)
+ parent_req_name = req_to_install.name
+ to_scan_again, add_to_parent = requirement_set.add_requirement(
+ sub_install_req,
+ parent_req_name=parent_req_name,
+ extras_requested=extras_requested,
+ )
+ if parent_req_name and add_to_parent:
+ self._discovered_dependencies[parent_req_name].append(add_to_parent)
+ more_reqs.extend(to_scan_again)
+
+ with indent_log():
+ # We add req_to_install before its dependencies, so that we
+ # can refer to it when adding dependencies.
+ if not requirement_set.has_requirement(req_to_install.name):
+ # 'unnamed' requirements will get added here
+ # 'unnamed' requirements can only come from being directly
+ # provided by the user.
+ assert req_to_install.user_supplied
+ requirement_set.add_requirement(req_to_install, parent_req_name=None)
+
+ if not self.ignore_dependencies:
+ if req_to_install.extras:
+ logger.debug(
+ "Installing extra requirements: %r",
+ ",".join(req_to_install.extras),
+ )
+ missing_requested = sorted(
+ set(req_to_install.extras) - set(dist.iter_provided_extras())
+ )
+ for missing in missing_requested:
+ logger.warning(
+ "%s %s does not provide the extra '%s'",
+ dist.raw_name,
+ dist.version,
+ missing,
+ )
+
+ available_requested = sorted(
+ set(dist.iter_provided_extras()) & set(req_to_install.extras)
+ )
+ for subreq in dist.iter_dependencies(available_requested):
+ add_req(subreq, extras_requested=available_requested)
+
+ return more_reqs
+
+ def get_installation_order(
+ self, req_set: RequirementSet
+ ) -> List[InstallRequirement]:
+ """Create the installation order.
+
+ The installation order is topological - requirements are installed
+ before the requiring thing. We break cycles at an arbitrary point,
+ and make no other guarantees.
+ """
+ # The current implementation, which we may change at any point,
+ # installs the user-specified things in the order given, except when
+ # dependencies must come earlier to achieve topological order.
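+ # For example, user-supplied requirements [A, B] where A depends on C
+ # come out as [C, A, B]: schedule(A) recurses into C and appends it
+ # before A itself.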
+ order = []
+ ordered_reqs: Set[InstallRequirement] = set()
+
+ def schedule(req: InstallRequirement) -> None:
+ if req.satisfied_by or req in ordered_reqs:
+ return
+ if req.constraint:
+ return
+ ordered_reqs.add(req)
+ for dep in self._discovered_dependencies[req.name]:
+ schedule(dep)
+ order.append(req)
+
+ for install_req in req_set.requirements.values():
+ schedule(install_req)
+ return order
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py
new file mode 100644
index 000000000..b206692a0
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py
@@ -0,0 +1,141 @@
+from typing import FrozenSet, Iterable, Optional, Tuple, Union
+
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion, Version
+
+from pip._internal.models.link import Link, links_equivalent
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.hashes import Hashes
+
+CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
+CandidateVersion = Union[LegacyVersion, Version]
+
+
+def format_name(project: str, extras: FrozenSet[str]) -> str:
+ if not extras:
+ return project
+ canonical_extras = sorted(canonicalize_name(e) for e in extras)
+ return "{}[{}]".format(project, ",".join(canonical_extras))
+
+
+class Constraint:
+ def __init__(
+ self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
+ ) -> None:
+ self.specifier = specifier
+ self.hashes = hashes
+ self.links = links
+
+ @classmethod
+ def empty(cls) -> "Constraint":
+ return Constraint(SpecifierSet(), Hashes(), frozenset())
+
+ @classmethod
+ def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
+ links = frozenset([ireq.link]) if ireq.link else frozenset()
+ return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
+
+ def __bool__(self) -> bool:
+ return bool(self.specifier) or bool(self.hashes) or bool(self.links)
+
+ def __and__(self, other: InstallRequirement) -> "Constraint":
+ if not isinstance(other, InstallRequirement):
+ return NotImplemented
+ specifier = self.specifier & other.specifier
+ hashes = self.hashes & other.hashes(trust_internet=False)
+ links = self.links
+ if other.link:
+ links = links.union([other.link])
+ return Constraint(specifier, hashes, links)
+
+ def is_satisfied_by(self, candidate: "Candidate") -> bool:
+ # Reject if there are any mismatched URL constraints on this package.
+ if self.links and not all(_match_link(link, candidate) for link in self.links):
+ return False
+ # We can safely always allow prereleases here since PackageFinder
+ # already implements the prerelease logic, and would have filtered out
+ # prerelease candidates if the user does not expect them.
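+ # For example, SpecifierSet(">=1.0").contains("2.0b1", prereleases=True)
+ # is True; had the user not opted into prereleases, the finder would not
+ # have produced a 2.0b1 candidate in the first place.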
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class Requirement: + @property + def project_name(self) -> NormalizedName: + """The "project name" of a requirement. + + This is different from ``name`` if this requirement contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Subclass should override") + + @property + def name(self) -> str: + """The name identifying this requirement in the resolver. + + This is different from ``project_name`` if this requirement contains + extras, where ``project_name`` would not contain the ``[...]`` part. + """ + raise NotImplementedError("Subclass should override") + + def is_satisfied_by(self, candidate: "Candidate") -> bool: + return False + + def get_candidate_lookup(self) -> CandidateLookup: + raise NotImplementedError("Subclass should override") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") + + +def _match_link(link: Link, candidate: "Candidate") -> bool: + if candidate.source_link: + return links_equivalent(link, candidate.source_link) + return False + + +class Candidate: + @property + def project_name(self) -> NormalizedName: + """The "project name" of the candidate. + + This is different from ``name`` if this candidate contains extras, + in which case ``name`` would contain the ``[...]`` part, while this + refers to the name of the project. + """ + raise NotImplementedError("Override in subclass") + + @property + def name(self) -> str: + """The name identifying this candidate in the resolver. + + This is different from ``project_name`` if this candidate contains + extras, where ``project_name`` would not contain the ``[...]`` part. 
+ """ + raise NotImplementedError("Override in subclass") + + @property + def version(self) -> CandidateVersion: + raise NotImplementedError("Override in subclass") + + @property + def is_installed(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def is_editable(self) -> bool: + raise NotImplementedError("Override in subclass") + + @property + def source_link(self) -> Optional[Link]: + raise NotImplementedError("Override in subclass") + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + raise NotImplementedError("Override in subclass") + + def get_install_requirement(self) -> Optional[InstallRequirement]: + raise NotImplementedError("Override in subclass") + + def format_for_error(self) -> str: + raise NotImplementedError("Subclass should override") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py new file mode 100644 index 000000000..9b8450e86 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -0,0 +1,547 @@ +import logging +import sys +from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast + +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import Version + +from pip._internal.exceptions import ( + HashError, + InstallationSubprocessError, + MetadataInconsistent, +) +from pip._internal.metadata import BaseDistribution +from pip._internal.models.link import Link, links_equivalent +from pip._internal.models.wheel import Wheel +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.misc import normalize_version_info + +from .base import Candidate, CandidateVersion, Requirement, format_name + +if TYPE_CHECKING: + from .factory import Factory + +logger = logging.getLogger(__name__) + +BaseCandidate = Union[ + "AlreadyInstalledCandidate", + "EditableCandidate", + "LinkCandidate", +] + +# Avoid conflicting with the PyPI package "Python". 
+REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "") + + +def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: + """The runtime version of BaseCandidate.""" + base_candidate_classes = ( + AlreadyInstalledCandidate, + EditableCandidate, + LinkCandidate, + ) + if isinstance(candidate, base_candidate_classes): + return candidate + return None + + +def make_install_req_from_link( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert not template.editable, "template is editable" + if template.req: + line = str(template.req) + else: + line = link.url + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.original_link = template.original_link + ireq.link = link + return ireq + + +def make_install_req_from_editable( + link: Link, template: InstallRequirement +) -> InstallRequirement: + assert template.editable, "template not editable" + return install_req_from_editable( + link.url, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + permit_editable_wheels=template.permit_editable_wheels, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + + +def _make_install_req_from_dist( + dist: BaseDistribution, template: InstallRequirement +) -> InstallRequirement: + if template.req: + line = str(template.req) + elif template.link: + line = f"{dist.canonical_name} @ {template.link.url}" + else: + line = f"{dist.canonical_name}=={dist.version}" + ireq = install_req_from_line( + line, + user_supplied=template.user_supplied, + comes_from=template.comes_from, + use_pep517=template.use_pep517, + isolated=template.isolated, + constraint=template.constraint, + options=dict( + install_options=template.install_options, + global_options=template.global_options, + hashes=template.hash_options, + ), + ) + ireq.satisfied_by = dist + return ireq + + +class _InstallRequirementBackedCandidate(Candidate): + """A candidate backed by an ``InstallRequirement``. + + This represents a package request with the target not being already + in the environment, and needs to be fetched and installed. The backing + ``InstallRequirement`` is responsible for most of the leg work; this + class exposes appropriate information to the resolver. + + :param link: The link passed to the ``InstallRequirement``. The backing + ``InstallRequirement`` will use this link to fetch the distribution. + :param source_link: The link this candidate "originates" from. This is + different from ``link`` when the link is found in the wheel cache. + ``link`` would point to the wheel cache, while this points to the + found remote link (e.g. from pypi.org). 
+ """ + + dist: BaseDistribution + is_installed = False + + def __init__( + self, + link: Link, + source_link: Link, + ireq: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + self._link = link + self._source_link = source_link + self._factory = factory + self._ireq = ireq + self._name = name + self._version = version + self.dist = self._prepare() + + def __str__(self) -> str: + return f"{self.name} {self.version}" + + def __repr__(self) -> str: + return "{class_name}({link!r})".format( + class_name=self.__class__.__name__, + link=str(self._link), + ) + + def __hash__(self) -> int: + return hash((self.__class__, self._link)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return links_equivalent(self._link, other._link) + return False + + @property + def source_link(self) -> Optional[Link]: + return self._source_link + + @property + def project_name(self) -> NormalizedName: + """The normalised name of the project the candidate refers to""" + if self._name is None: + self._name = self.dist.canonical_name + return self._name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + if self._version is None: + self._version = self.dist.version + return self._version + + def format_for_error(self) -> str: + return "{} {} (from {})".format( + self.name, + self.version, + self._link.file_path if self._link.is_file else self._link, + ) + + def _prepare_distribution(self) -> BaseDistribution: + raise NotImplementedError("Override in subclass") + + def _check_metadata_consistency(self, dist: BaseDistribution) -> None: + """Check for consistency of project name and version of dist.""" + if self._name is not None and self._name != dist.canonical_name: + raise MetadataInconsistent( + self._ireq, + "name", + self._name, + dist.canonical_name, + ) + if self._version is not None and self._version != dist.version: + raise MetadataInconsistent( + self._ireq, + "version", + str(self._version), + str(dist.version), + ) + + def _prepare(self) -> BaseDistribution: + try: + dist = self._prepare_distribution() + except HashError as e: + # Provide HashError the underlying ireq that caused it. This + # provides context for the resulting error message to show the + # offending line to the user. + e.req = self._ireq + raise + except InstallationSubprocessError as exc: + # The output has been presented already, so don't duplicate it. + exc.context = "See above for output." 
+ raise + + self._check_metadata_consistency(dist) + return dist + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + requires = self.dist.iter_dependencies() if with_requires else () + for r in requires: + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield self._factory.make_requires_python_requirement(self.dist.requires_python) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return self._ireq + + +class LinkCandidate(_InstallRequirementBackedCandidate): + is_editable = False + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + source_link = link + cache_entry = factory.get_wheel_cache_entry(link, name) + if cache_entry is not None: + logger.debug("Using cached wheel link: %s", cache_entry.link) + link = cache_entry.link + ireq = make_install_req_from_link(link, template) + assert ireq.link == link + if ireq.link.is_wheel and not ireq.link.is_file: + wheel = Wheel(ireq.link.filename) + wheel_name = canonicalize_name(wheel.name) + assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel" + # Version may not be present for PEP 508 direct URLs + if version is not None: + wheel_version = Version(wheel.version) + assert version == wheel_version, "{!r} != {!r} for wheel {}".format( + version, wheel_version, name + ) + + if ( + cache_entry is not None + and cache_entry.persistent + and template.link is template.original_link + ): + ireq.original_link_is_in_wheel_cache = True + + super().__init__( + link=link, + source_link=source_link, + ireq=ireq, + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + preparer = self._factory.preparer + return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True) + + +class EditableCandidate(_InstallRequirementBackedCandidate): + is_editable = True + + def __init__( + self, + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: + super().__init__( + link=link, + source_link=link, + ireq=make_install_req_from_editable(link, template), + factory=factory, + name=name, + version=version, + ) + + def _prepare_distribution(self) -> BaseDistribution: + return self._factory.preparer.prepare_editable_requirement(self._ireq) + + +class AlreadyInstalledCandidate(Candidate): + is_installed = True + source_link = None + + def __init__( + self, + dist: BaseDistribution, + template: InstallRequirement, + factory: "Factory", + ) -> None: + self.dist = dist + self._ireq = _make_install_req_from_dist(dist, template) + self._factory = factory + + # This is just logging some messages, so we can do it eagerly. + # The returned dist would be exactly the same as self.dist because we + # set satisfied_by in _make_install_req_from_dist. + # TODO: Supply reason based on force_reinstall and upgrade_strategy. 
+ skip_reason = "already satisfied" + factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) + + def __str__(self) -> str: + return str(self.dist) + + def __repr__(self) -> str: + return "{class_name}({distribution!r})".format( + class_name=self.__class__.__name__, + distribution=self.dist, + ) + + def __hash__(self) -> int: + return hash((self.__class__, self.name, self.version)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.name == other.name and self.version == other.version + return False + + @property + def project_name(self) -> NormalizedName: + return self.dist.canonical_name + + @property + def name(self) -> str: + return self.project_name + + @property + def version(self) -> CandidateVersion: + return self.dist.version + + @property + def is_editable(self) -> bool: + return self.dist.editable + + def format_for_error(self) -> str: + return f"{self.name} {self.version} (Installed)" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + if not with_requires: + return + for r in self.dist.iter_dependencies(): + yield self._factory.make_requirement_from_spec(str(r), self._ireq) + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None + + +class ExtrasCandidate(Candidate): + """A candidate that has 'extras', indicating additional dependencies. + + Requirements can be for a project with dependencies, something like + foo[extra]. The extras don't affect the project/version being installed + directly, but indicate that we need additional dependencies. We model that + by having an artificial ExtrasCandidate that wraps the "base" candidate. + + The ExtrasCandidate differs from the base in the following ways: + + 1. It has a unique name, of the form foo[extra]. This causes the resolver + to treat it as a separate node in the dependency graph. + 2. When we're getting the candidate's dependencies, + a) We specify that we want the extra dependencies as well. + b) We add a dependency on the base candidate. + See below for why this is needed. + 3. We return None for the underlying InstallRequirement, as the base + candidate will provide it, and we don't want to end up with duplicates. + + The dependency on the base candidate is needed so that the resolver can't + decide that it should recommend foo[extra1] version 1.0 and foo[extra2] + version 2.0. Having those candidates depend on foo=1.0 and foo=2.0 + respectively forces the resolver to recognise that this is a conflict. 
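+
+ For example, requesting foo[extra1]==1.0 yields an ExtrasCandidate named
+ "foo[extra1]" whose dependencies are the extra's requirements plus the
+ plain foo 1.0 base candidate itself.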
+ """ + + def __init__( + self, + base: BaseCandidate, + extras: FrozenSet[str], + ) -> None: + self.base = base + self.extras = extras + + def __str__(self) -> str: + name, rest = str(self.base).split(" ", 1) + return "{}[{}] {}".format(name, ",".join(self.extras), rest) + + def __repr__(self) -> str: + return "{class_name}(base={base!r}, extras={extras!r})".format( + class_name=self.__class__.__name__, + base=self.base, + extras=self.extras, + ) + + def __hash__(self) -> int: + return hash((self.base, self.extras)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self.base == other.base and self.extras == other.extras + return False + + @property + def project_name(self) -> NormalizedName: + return self.base.project_name + + @property + def name(self) -> str: + """The normalised name of the project the candidate refers to""" + return format_name(self.base.project_name, self.extras) + + @property + def version(self) -> CandidateVersion: + return self.base.version + + def format_for_error(self) -> str: + return "{} [{}]".format( + self.base.format_for_error(), ", ".join(sorted(self.extras)) + ) + + @property + def is_installed(self) -> bool: + return self.base.is_installed + + @property + def is_editable(self) -> bool: + return self.base.is_editable + + @property + def source_link(self) -> Optional[Link]: + return self.base.source_link + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + factory = self.base._factory + + # Add a dependency on the exact base + # (See note 2b in the class docstring) + yield factory.make_requirement_from_candidate(self.base) + if not with_requires: + return + + # The user may have specified extras that the candidate doesn't + # support. We ignore any unsupported extras here. + valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) + invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) + for extra in sorted(invalid_extras): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + for r in self.base.dist.iter_dependencies(valid_extras): + requirement = factory.make_requirement_from_spec( + str(r), self.base._ireq, valid_extras + ) + if requirement: + yield requirement + + def get_install_requirement(self) -> Optional[InstallRequirement]: + # We don't return anything here, because we always + # depend on the base candidate, and we'll get the + # install requirement from that. + return None + + +class RequiresPythonCandidate(Candidate): + is_installed = False + source_link = None + + def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None: + if py_version_info is not None: + version_info = normalize_version_info(py_version_info) + else: + version_info = sys.version_info[:3] + self._version = Version(".".join(str(c) for c in version_info)) + + # We don't need to implement __eq__() and __ne__() since there is always + # only one RequiresPythonCandidate in a resolution, i.e. the host Python. + # The built-in object.__eq__() and object.__ne__() do exactly what we want. 
+ + def __str__(self) -> str: + return f"Python {self._version}" + + @property + def project_name(self) -> NormalizedName: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def name(self) -> str: + return REQUIRES_PYTHON_IDENTIFIER + + @property + def version(self) -> CandidateVersion: + return self._version + + def format_for_error(self) -> str: + return f"Python {self.version}" + + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: + return () + + def get_install_requirement(self) -> Optional[InstallRequirement]: + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py new file mode 100644 index 000000000..261d8d560 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -0,0 +1,739 @@ +import contextlib +import functools +import logging +from typing import ( + TYPE_CHECKING, + Dict, + FrozenSet, + Iterable, + Iterator, + List, + Mapping, + NamedTuple, + Optional, + Sequence, + Set, + Tuple, + TypeVar, + cast, +) + +from pip._vendor.packaging.requirements import InvalidRequirement +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.resolvelib import ResolutionImpossible + +from pip._internal.cache import CacheEntry, WheelCache +from pip._internal.exceptions import ( + DistributionNotFound, + InstallationError, + InstallationSubprocessError, + MetadataInconsistent, + UnsupportedPythonVersion, + UnsupportedWheel, +) +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_default_environment +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_from_link_and_ireq +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) +from pip._internal.resolution.base import InstallRequirementProvider +from pip._internal.utils.compatibility_tags import get_supported +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.packaging import get_requirement +from pip._internal.utils.virtualenv import running_under_virtualenv + +from .base import Candidate, CandidateVersion, Constraint, Requirement +from .candidates import ( + AlreadyInstalledCandidate, + BaseCandidate, + EditableCandidate, + ExtrasCandidate, + LinkCandidate, + RequiresPythonCandidate, + as_base_candidate, +) +from .found_candidates import FoundCandidates, IndexCandidateInfo +from .requirements import ( + ExplicitRequirement, + RequiresPythonRequirement, + SpecifierRequirement, + UnsatisfiableRequirement, +) + +if TYPE_CHECKING: + from typing import Protocol + + class ConflictCause(Protocol): + requirement: RequiresPythonRequirement + parent: Candidate + + +logger = logging.getLogger(__name__) + +C = TypeVar("C") +Cache = Dict[Link, C] + + +class CollectedRootRequirements(NamedTuple): + requirements: List[Requirement] + constraints: Dict[str, Constraint] + user_requested: Dict[str, int] + + +class Factory: + def __init__( + self, + finder: PackageFinder, + preparer: RequirementPreparer, + make_install_req: InstallRequirementProvider, + wheel_cache: Optional[WheelCache], + use_user_site: bool, + force_reinstall: bool, + ignore_installed: bool, + 
ignore_requires_python: bool, + suppress_build_failures: bool, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: + self._finder = finder + self.preparer = preparer + self._wheel_cache = wheel_cache + self._python_candidate = RequiresPythonCandidate(py_version_info) + self._make_install_req_from_spec = make_install_req + self._use_user_site = use_user_site + self._force_reinstall = force_reinstall + self._ignore_requires_python = ignore_requires_python + self._suppress_build_failures = suppress_build_failures + + self._build_failures: Cache[InstallationError] = {} + self._link_candidate_cache: Cache[LinkCandidate] = {} + self._editable_candidate_cache: Cache[EditableCandidate] = {} + self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} + self._extras_candidate_cache: Dict[ + Tuple[int, FrozenSet[str]], ExtrasCandidate + ] = {} + + if not ignore_installed: + env = get_default_environment() + self._installed_dists = { + dist.canonical_name: dist + for dist in env.iter_installed_distributions(local_only=False) + } + else: + self._installed_dists = {} + + @property + def force_reinstall(self) -> bool: + return self._force_reinstall + + def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: + if not link.is_wheel: + return + wheel = Wheel(link.filename) + if wheel.supported(self._finder.target_python.get_tags()): + return + msg = f"{link.filename} is not a supported wheel on this platform." + raise UnsupportedWheel(msg) + + def _make_extras_candidate( + self, base: BaseCandidate, extras: FrozenSet[str] + ) -> ExtrasCandidate: + cache_key = (id(base), extras) + try: + candidate = self._extras_candidate_cache[cache_key] + except KeyError: + candidate = ExtrasCandidate(base, extras) + self._extras_candidate_cache[cache_key] = candidate + return candidate + + def _make_candidate_from_dist( + self, + dist: BaseDistribution, + extras: FrozenSet[str], + template: InstallRequirement, + ) -> Candidate: + try: + base = self._installed_candidate_cache[dist.canonical_name] + except KeyError: + base = AlreadyInstalledCandidate(dist, template, factory=self) + self._installed_candidate_cache[dist.canonical_name] = base + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _make_candidate_from_link( + self, + link: Link, + extras: FrozenSet[str], + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[Candidate]: + # TODO: Check already installed candidate, and use it if the link and + # editable flag match. + + if link in self._build_failures: + # We already tried this candidate before, and it does not build. + # Don't bother trying again. 
+ return None + + if template.editable: + if link not in self._editable_candidate_cache: + try: + self._editable_candidate_cache[link] = EditableCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except MetadataInconsistent as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + except InstallationSubprocessError as e: + if not self._suppress_build_failures: + raise + logger.warning("Discarding %s due to build failure: %s", link, e) + self._build_failures[link] = e + return None + + base: BaseCandidate = self._editable_candidate_cache[link] + else: + if link not in self._link_candidate_cache: + try: + self._link_candidate_cache[link] = LinkCandidate( + link, + template, + factory=self, + name=name, + version=version, + ) + except MetadataInconsistent as e: + logger.info( + "Discarding [blue underline]%s[/]: [yellow]%s[reset]", + link, + e, + extra={"markup": True}, + ) + self._build_failures[link] = e + return None + except InstallationSubprocessError as e: + if not self._suppress_build_failures: + raise + logger.warning("Discarding %s due to build failure: %s", link, e) + self._build_failures[link] = e + return None + base = self._link_candidate_cache[link] + + if not extras: + return base + return self._make_extras_candidate(base, extras) + + def _iter_found_candidates( + self, + ireqs: Sequence[InstallRequirement], + specifier: SpecifierSet, + hashes: Hashes, + prefers_installed: bool, + incompatible_ids: Set[int], + ) -> Iterable[Candidate]: + if not ireqs: + return () + + # The InstallRequirement implementation requires us to give it a + # "template". Here we just choose the first requirement to represent + # all of them. + # Hopefully the Project model can correct this mismatch in the future. + template = ireqs[0] + assert template.req, "Candidates found on index must be PEP 508" + name = canonicalize_name(template.req.name) + + extras: FrozenSet[str] = frozenset() + for ireq in ireqs: + assert ireq.req, "Candidates found on index must be PEP 508" + specifier &= ireq.req.specifier + hashes &= ireq.hashes(trust_internet=False) + extras |= frozenset(ireq.extras) + + def _get_installed_candidate() -> Optional[Candidate]: + """Get the candidate for the currently-installed version.""" + # If --force-reinstall is set, we want the version from the index + # instead, so we "pretend" there is nothing installed. + if self._force_reinstall: + return None + try: + installed_dist = self._installed_dists[name] + except KeyError: + return None + # Don't use the installed distribution if its version does not fit + # the current dependency graph. + if not specifier.contains(installed_dist.version, prereleases=True): + return None + candidate = self._make_candidate_from_dist( + dist=installed_dist, + extras=extras, + template=template, + ) + # The candidate is a known incompatibility. Don't use it. + if id(candidate) in incompatible_ids: + return None + return candidate + + def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: + result = self._finder.find_best_candidate( + project_name=name, + specifier=specifier, + hashes=hashes, + ) + icans = list(result.iter_applicable()) + + # PEP 592: Yanked releases are ignored unless the specifier + # explicitly pins a version (via '==' or '===') that can be + # solely satisfied by a yanked release. 
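+ # For example, "==1.4.2" and "===1.4.2" count as pinned below, while
+ # "==1.4.*" and ">=1.4" do not; a yanked link is skipped unless every
+ # applicable candidate is yanked and the specifier is pinned.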
+ all_yanked = all(ican.link.is_yanked for ican in icans) + + def is_pinned(specifier: SpecifierSet) -> bool: + for sp in specifier: + if sp.operator == "===": + return True + if sp.operator != "==": + continue + if sp.version.endswith(".*"): + continue + return True + return False + + pinned = is_pinned(specifier) + + # PackageFinder returns earlier versions first, so we reverse. + for ican in reversed(icans): + if not (all_yanked and pinned) and ican.link.is_yanked: + continue + func = functools.partial( + self._make_candidate_from_link, + link=ican.link, + extras=extras, + template=template, + name=name, + version=ican.version, + ) + yield ican.version, func + + return FoundCandidates( + iter_index_candidate_infos, + _get_installed_candidate(), + prefers_installed, + incompatible_ids, + ) + + def _iter_explicit_candidates_from_base( + self, + base_requirements: Iterable[Requirement], + extras: FrozenSet[str], + ) -> Iterator[Candidate]: + """Produce explicit candidates from the base given an extra-ed package. + + :param base_requirements: Requirements known to the resolver. The + requirements are guaranteed to not have extras. + :param extras: The extras to inject into the explicit requirements' + candidates. + """ + for req in base_requirements: + lookup_cand, _ = req.get_candidate_lookup() + if lookup_cand is None: # Not explicit. + continue + # We've stripped extras from the identifier, and should always + # get a BaseCandidate here, unless there's a bug elsewhere. + base_cand = as_base_candidate(lookup_cand) + assert base_cand is not None, "no extras here" + yield self._make_extras_candidate(base_cand, extras) + + def _iter_candidates_from_constraints( + self, + identifier: str, + constraint: Constraint, + template: InstallRequirement, + ) -> Iterator[Candidate]: + """Produce explicit candidates from constraints. + + This creates "fake" InstallRequirement objects that are basically clones + of what "should" be the template, but with original_link set to link. + """ + for link in constraint.links: + self._fail_if_link_is_unsupported_wheel(link) + candidate = self._make_candidate_from_link( + link, + extras=frozenset(), + template=install_req_from_link_and_ireq(link, template), + name=canonicalize_name(identifier), + version=None, + ) + if candidate: + yield candidate + + def find_candidates( + self, + identifier: str, + requirements: Mapping[str, Iterable[Requirement]], + incompatibilities: Mapping[str, Iterator[Candidate]], + constraint: Constraint, + prefers_installed: bool, + ) -> Iterable[Candidate]: + # Collect basic lookup information from the requirements. + explicit_candidates: Set[Candidate] = set() + ireqs: List[InstallRequirement] = [] + for req in requirements[identifier]: + cand, ireq = req.get_candidate_lookup() + if cand is not None: + explicit_candidates.add(cand) + if ireq is not None: + ireqs.append(ireq) + + # If the current identifier contains extras, add explicit candidates + # from entries from extra-less identifier. + with contextlib.suppress(InvalidRequirement): + parsed_requirement = get_requirement(identifier) + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + + # Add explicit candidates from constraints. We only do this if there are + # known ireqs, which represent requirements not already explicit. 
If + # there are no ireqs, we're constraining already-explicit requirements, + # which is handled later when we return the explicit candidates. + if ireqs: + try: + explicit_candidates.update( + self._iter_candidates_from_constraints( + identifier, + constraint, + template=ireqs[0], + ), + ) + except UnsupportedWheel: + # If we're constrained to install a wheel incompatible with the + # target architecture, no candidates will ever be valid. + return () + + # Since we cache all the candidates, incompatibility identification + # can be made quicker by comparing only the id() values. + incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())} + + # If none of the requirements want an explicit candidate, we can ask + # the finder for candidates. + if not explicit_candidates: + return self._iter_found_candidates( + ireqs, + constraint.specifier, + constraint.hashes, + prefers_installed, + incompat_ids, + ) + + return ( + c + for c in explicit_candidates + if id(c) not in incompat_ids + and constraint.is_satisfied_by(c) + and all(req.is_satisfied_by(c) for req in requirements[identifier]) + ) + + def _make_requirement_from_install_req( + self, ireq: InstallRequirement, requested_extras: Iterable[str] + ) -> Optional[Requirement]: + if not ireq.match_markers(requested_extras): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + ireq.name, + ireq.markers, + ) + return None + if not ireq.link: + return SpecifierRequirement(ireq) + self._fail_if_link_is_unsupported_wheel(ireq.link) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. 
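+ # For example, an unnamed requirement like "./pkgs/broken.tar.gz" fails
+ # eagerly here, while a named "foo @ https://..." one becomes an
+ # UnsatisfiableRequirement the resolver can backtrack around.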
+ if not ireq.name:
+ raise self._build_failures[ireq.link]
+ return UnsatisfiableRequirement(canonicalize_name(ireq.name))
+ return self.make_requirement_from_candidate(cand)
+
+ def collect_root_requirements(
+ self, root_ireqs: List[InstallRequirement]
+ ) -> CollectedRootRequirements:
+ collected = CollectedRootRequirements([], {}, {})
+ for i, ireq in enumerate(root_ireqs):
+ if ireq.constraint:
+ # Ensure we only accept valid constraints
+ problem = check_invalid_constraint_type(ireq)
+ if problem:
+ raise InstallationError(problem)
+ if not ireq.match_markers():
+ continue
+ assert ireq.name, "Constraint must be named"
+ name = canonicalize_name(ireq.name)
+ if name in collected.constraints:
+ collected.constraints[name] &= ireq
+ else:
+ collected.constraints[name] = Constraint.from_ireq(ireq)
+ else:
+ req = self._make_requirement_from_install_req(
+ ireq,
+ requested_extras=(),
+ )
+ if req is None:
+ continue
+ if ireq.user_supplied and req.name not in collected.user_requested:
+ collected.user_requested[req.name] = i
+ collected.requirements.append(req)
+ return collected
+
+ def make_requirement_from_candidate(
+ self, candidate: Candidate
+ ) -> ExplicitRequirement:
+ return ExplicitRequirement(candidate)
+
+ def make_requirement_from_spec(
+ self,
+ specifier: str,
+ comes_from: Optional[InstallRequirement],
+ requested_extras: Iterable[str] = (),
+ ) -> Optional[Requirement]:
+ ireq = self._make_install_req_from_spec(specifier, comes_from)
+ return self._make_requirement_from_install_req(ireq, requested_extras)
+
+ def make_requires_python_requirement(
+ self,
+ specifier: SpecifierSet,
+ ) -> Optional[Requirement]:
+ if self._ignore_requires_python:
+ return None
+ # Don't bother creating a dependency for an empty Requires-Python.
+ if not str(specifier):
+ return None
+ return RequiresPythonRequirement(specifier, self._python_candidate)
+
+ def get_wheel_cache_entry(
+ self, link: Link, name: Optional[str]
+ ) -> Optional[CacheEntry]:
+ """Look up the link in the wheel cache.
+
+ If ``preparer.require_hashes`` is True, don't use the wheel cache,
+ because cached wheels, always built locally, have different hashes
+ than the files downloaded from the index server and thus throw false
+ hash mismatches. Furthermore, cached wheels at present have
+ nondeterministic contents due to file modification times.
+ """
+ if self._wheel_cache is None or self.preparer.require_hashes:
+ return None
+ return self._wheel_cache.get_cache_entry(
+ link=link,
+ package_name=name,
+ supported_tags=get_supported(),
+ )
+
+ def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
+ # TODO: Are there more cases this needs to return True? Editable?
+ dist = self._installed_dists.get(candidate.project_name)
+ if dist is None: # Not installed, no uninstallation required.
+ return None
+
+ # We're installing into the global site. The current installation must
+ # be uninstalled, no matter whether it's in the global or user site,
+ # because the user site installation has precedence over global.
+ if not self._use_user_site:
+ return dist
+
+ # We're installing into the user site. Remove the user site installation.
+ if dist.in_usersite:
+ return dist
+
+ # We're installing into the user site, but the installed incompatible
+ # package is in the global site. We can't uninstall that, and would let
+ # the new user installation "shadow" it. But shadowing won't work
+ # in virtual environments, so we error out.
+ if running_under_virtualenv() and dist.in_site_packages: + message = ( + f"Will not install to the user site because it will lack " + f"sys.path precedence to {dist.raw_name} in {dist.location}" + ) + raise InstallationError(message) + return None + + def _report_requires_python_error( + self, causes: Sequence["ConflictCause"] + ) -> UnsupportedPythonVersion: + assert causes, "Requires-Python error reported with no cause" + + version = self._python_candidate.version + + if len(causes) == 1: + specifier = str(causes[0].requirement.specifier) + message = ( + f"Package {causes[0].parent.name!r} requires a different " + f"Python: {version} not in {specifier!r}" + ) + return UnsupportedPythonVersion(message) + + message = f"Packages require a different Python. {version} not in:" + for cause in causes: + package = cause.parent.format_for_error() + specifier = str(cause.requirement.specifier) + message += f"\n{specifier!r} (required by {package})" + return UnsupportedPythonVersion(message) + + def _report_single_requirement_conflict( + self, req: Requirement, parent: Optional[Candidate] + ) -> DistributionNotFound: + if parent is None: + req_disp = str(req) + else: + req_disp = f"{req} (from {parent.name})" + + cands = self._finder.find_all_candidates(req.project_name) + versions = [str(v) for v in sorted({c.version for c in cands})] + + logger.critical( + "Could not find a version that satisfies the requirement %s " + "(from versions: %s)", + req_disp, + ", ".join(versions) or "none", + ) + if str(req) == "requirements.txt": + logger.info( + "HINT: You are attempting to install a package literally " + 'named "requirements.txt" (which cannot exist). Consider ' + "using the '-r' flag to install the packages listed in " + "requirements.txt" + ) + + return DistributionNotFound(f"No matching distribution found for {req}") + + def get_installation_error( + self, + e: "ResolutionImpossible[Requirement, Candidate]", + constraints: Dict[str, Constraint], + ) -> InstallationError: + + assert e.causes, "Installation error reported with no cause" + + # If one of the things we can't solve is "we need Python X.Y", + # that is what we report. + requires_python_causes = [ + cause + for cause in e.causes + if isinstance(cause.requirement, RequiresPythonRequirement) + and not cause.requirement.is_satisfied_by(self._python_candidate) + ] + if requires_python_causes: + # The comprehension above makes sure all Requirement instances are + # RequiresPythonRequirement, so let's cast for convenience. + return self._report_requires_python_error( + cast("Sequence[ConflictCause]", requires_python_causes), + ) + + # Otherwise, we have a set of causes which can't all be satisfied + # at once. + + # The simplest case is when we have *one* cause that can't be + # satisfied. We just report that case. + if len(e.causes) == 1: + req, parent = e.causes[0] + if req.name not in constraints: + return self._report_single_requirement_conflict(req, parent) + + # OK, we now have a list of requirements that can't all be + # satisfied at once. 
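+ # For example, the text_join helper below renders ["a", "b", "c"] as
+ # "a, b and c" when naming the packages that triggered the conflict.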
+
+ # A couple of formatting helpers
+ def text_join(parts: List[str]) -> str:
+ if len(parts) == 1:
+ return parts[0]
+
+ return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+ def describe_trigger(parent: Candidate) -> str:
+ ireq = parent.get_install_requirement()
+ if not ireq or not ireq.comes_from:
+ return f"{parent.name}=={parent.version}"
+ if isinstance(ireq.comes_from, InstallRequirement):
+ return str(ireq.comes_from.name)
+ return str(ireq.comes_from)
+
+ triggers = set()
+ for req, parent in e.causes:
+ if parent is None:
+ # This is a root requirement, so we can report it directly
+ trigger = req.format_for_error()
+ else:
+ trigger = describe_trigger(parent)
+ triggers.add(trigger)
+
+ if triggers:
+ info = text_join(sorted(triggers))
+ else:
+ info = "the requested packages"
+
+ msg = (
+ "Cannot install {} because these package versions "
+ "have conflicting dependencies.".format(info)
+ )
+ logger.critical(msg)
+ msg = "\nThe conflict is caused by:"
+
+ relevant_constraints = set()
+ for req, parent in e.causes:
+ if req.name in constraints:
+ relevant_constraints.add(req.name)
+ msg = msg + "\n "
+ if parent:
+ msg = msg + f"{parent.name} {parent.version} depends on "
+ else:
+ msg = msg + "The user requested "
+ msg = msg + req.format_for_error()
+ for key in relevant_constraints:
+ spec = constraints[key].specifier
+ msg += f"\n The user requested (constraint) {key}{spec}"
+
+ msg = (
+ msg
+ + "\n\n"
+ + "To fix this you could try to:\n"
+ + "1. loosen the range of package versions you've specified\n"
+ + "2. remove package versions to allow pip to attempt to solve "
+ + "the dependency conflict\n"
+ )
+
+ logger.info(msg)
+
+ return DistributionNotFound(
+ "ResolutionImpossible: for help visit "
+ "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+ "#dealing-with-dependency-conflicts"
+ )
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
new file mode 100644
index 000000000..8663097b4
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -0,0 +1,155 @@
+"""Utilities to lazily create and visit candidates found.
+
+Creating and visiting a candidate is a *very* costly operation. It involves
+fetching, extracting, potentially building modules from source, and verifying
+distribution metadata. It is therefore crucial for performance to keep
+everything here lazy all the way down, so we only touch candidates that we
+absolutely need, and not "download the world" when we only need one version of
+something.
+"""
+
+import functools
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+
+from pip._vendor.packaging.version import _BaseVersion
+
+from .base import Candidate
+
+IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
+
+if TYPE_CHECKING:
+ SequenceCandidate = Sequence[Candidate]
+else:
+ # For compatibility: Python before 3.9 does not support using [] on the
+ # Sequence class.
+ #
+ # >>> from collections.abc import Sequence
+ # >>> Sequence[str]
+ # Traceback (most recent call last):
+ # File "<stdin>", line 1, in <module>
+ # TypeError: 'ABCMeta' object is not subscriptable
+ #
+ # TODO: Remove this block after dropping Python 3.8 support.
+ SequenceCandidate = Sequence
+
+
+def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the package is not already installed. Candidates
+ from the index are yielded in their normal ordering.
+ """
+ versions_found: Set[_BaseVersion] = set()
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ candidate = func()
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+ installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the resolver prefers the already-installed
+ candidate and does NOT want to upgrade. The installed candidate is
+ therefore always yielded first, and candidates from the index come later
+ in their normal ordering, except that the candidate whose version is
+ already installed is skipped.
+ """
+ yield installed
+ versions_found: Set[_BaseVersion] = {installed.version}
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ candidate = func()
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+ installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the resolver prefers to upgrade an
+ already-installed package. Candidates from the index are returned in
+ their normal ordering, except that the candidate whose version is
+ already installed is replaced by the installed candidate.
+
+ The implementation iterates through and yields other candidates, inserting
+ the installed candidate exactly once before we start yielding older or
+ equivalent candidates, or after all other candidates if they are all newer.
+ """
+ versions_found: Set[_BaseVersion] = set()
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ # If the installed candidate is better, yield it first.
+ if installed.version >= version:
+ yield installed
+ versions_found.add(installed.version)
+ candidate = func()
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+ # If the installed candidate is older than all other candidates.
+ if installed.version not in versions_found:
+ yield installed
+
+
+class FoundCandidates(SequenceCandidate):
+ """A lazy sequence to provide candidates to the resolver.
+
+ The intended usage is to return this from `find_matches()` so the resolver
+ can iterate through the sequence multiple times, but only access the index
+ page when remote packages are actually needed. This improves performance
+ when suitable candidates are already installed on disk.
+ """
+
+ def __init__(
+ self,
+ get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+ installed: Optional[Candidate],
+ prefers_installed: bool,
+ incompatible_ids: Set[int],
+ ):
+ self._get_infos = get_infos
+ self._installed = installed
+ self._prefers_installed = prefers_installed
+ self._incompatible_ids = incompatible_ids
+
+ def __getitem__(self, index: Any) -> Any:
+ # Implemented to satisfy the ABC check. This is not needed by the
+ # resolver, and should not be used by the provider either (for
+ # performance reasons).
+ raise NotImplementedError("don't do this") + + def __iter__(self) -> Iterator[Candidate]: + infos = self._get_infos() + if not self._installed: + iterator = _iter_built(infos) + elif self._prefers_installed: + iterator = _iter_built_with_prepended(self._installed, infos) + else: + iterator = _iter_built_with_inserted(self._installed, infos) + return (c for c in iterator if id(c) not in self._incompatible_ids) + + def __len__(self) -> int: + # Implemented to satisfy the ABC check. This is not needed by the + # resolver, and should not be used by the provider either (for + # performance reasons). + raise NotImplementedError("don't do this") + + @functools.lru_cache(maxsize=1) + def __bool__(self) -> bool: + if self._prefers_installed and self._installed: + return True + return any(self) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py new file mode 100644 index 000000000..e6ec9594f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -0,0 +1,248 @@ +import collections +import math +from typing import ( + TYPE_CHECKING, + Dict, + Iterable, + Iterator, + Mapping, + Sequence, + TypeVar, + Union, +) + +from pip._vendor.resolvelib.providers import AbstractProvider + +from .base import Candidate, Constraint, Requirement +from .candidates import REQUIRES_PYTHON_IDENTIFIER +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.providers import Preference + from pip._vendor.resolvelib.resolvers import RequirementInformation + + PreferenceInformation = RequirementInformation[Requirement, Candidate] + + _ProviderBase = AbstractProvider[Requirement, Candidate, str] +else: + _ProviderBase = AbstractProvider + +# Notes on the relationship between the provider, the factory, and the +# candidate and requirement classes. +# +# The provider is a direct implementation of the resolvelib class. Its role +# is to deliver the API that resolvelib expects. +# +# Rather than work with completely abstract "requirement" and "candidate" +# concepts as resolvelib does, pip has concrete classes implementing these two +# ideas. The API of Requirement and Candidate objects are defined in the base +# classes, but essentially map fairly directly to the equivalent provider +# methods. In particular, `find_matches` and `is_satisfied_by` are +# requirement methods, and `get_dependencies` is a candidate method. +# +# The factory is the interface to pip's internal mechanisms. It is stateless, +# and is created by the resolver and held as a property of the provider. It is +# responsible for creating Requirement and Candidate objects, and provides +# services to those objects (access to pip's finder and preparer). + + +D = TypeVar("D") +V = TypeVar("V") + + +def _get_with_identifier( + mapping: Mapping[str, V], + identifier: str, + default: D, +) -> Union[D, V]: + """Get item from a package name lookup mapping with a resolver identifier. + + This extra logic is needed when the target mapping is keyed by package + name, which cannot be directly looked up with an identifier (which may + contain requested extras). Additional logic is added to also look up a value + by "cleaning up" the extras from the identifier. 
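+
+    Illustrative example (editor's sketch; the mapping content is made up):
+
+    >>> mapping = {"requests": "constraint"}
+    >>> _get_with_identifier(mapping, "requests[socks]", default=None)
+    'constraint'
+    >>> _get_with_identifier(mapping, "idna", default=None) is None
+    True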
+    """
+    if identifier in mapping:
+        return mapping[identifier]
+    # HACK: Theoretically we should check whether this identifier is a valid
+    # "NAME[EXTRAS]" format, and parse out the name part with packaging or
+    # some regular expression. But since pip's resolver only spits out three
+    # kinds of identifiers: normalized PEP 503 names, normalized names plus
+    # extras, and Requires-Python, we can cheat a bit here.
+    name, open_bracket, _ = identifier.partition("[")
+    if open_bracket and name in mapping:
+        return mapping[name]
+    return default
+
+
+class PipProvider(_ProviderBase):
+    """Pip's provider implementation for resolvelib.
+
+    :param constraints: A mapping of constraints specified by the user. Keys
+        are canonicalized project names.
+    :param ignore_dependencies: Whether the user specified ``--no-deps``.
+    :param upgrade_strategy: The user-specified upgrade strategy.
+    :param user_requested: A mapping of canonicalized package names the user
+        supplied for pip to install/upgrade, to their order on the command line.
+    """
+
+    def __init__(
+        self,
+        factory: Factory,
+        constraints: Dict[str, Constraint],
+        ignore_dependencies: bool,
+        upgrade_strategy: str,
+        user_requested: Dict[str, int],
+    ) -> None:
+        self._factory = factory
+        self._constraints = constraints
+        self._ignore_dependencies = ignore_dependencies
+        self._upgrade_strategy = upgrade_strategy
+        self._user_requested = user_requested
+        self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
+
+    def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
+        return requirement_or_candidate.name
+
+    def get_preference(  # type: ignore
+        self,
+        identifier: str,
+        resolutions: Mapping[str, Candidate],
+        candidates: Mapping[str, Iterator[Candidate]],
+        information: Mapping[str, Iterable["PreferenceInformation"]],
+        backtrack_causes: Sequence["PreferenceInformation"],
+    ) -> "Preference":
+        """Produce a sort key for given requirement based on preference.
+
+        The lower the return value is, the more preferred this group of
+        arguments is.
+
+        Currently pip considers the following in order:
+
+        * Prefer if any of the known requirements is "direct", e.g. points to an
+          explicit URL.
+        * If equal, prefer if any requirement is "pinned", i.e. contains
+          operator ``===`` or ``==``.
+        * If equal, calculate an approximate "depth" and resolve requirements
+          closer to the user-specified requirements first.
+        * Order user-specified requirements by the order they are specified.
+        * If equal, prefer "non-free" requirements, i.e. those that contain at
+          least one operator, such as ``>=`` or ``<``.
+        * If equal, order alphabetically for consistency (helps debuggability).
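+
+        As a rough illustration (editor's sketch; the key values below are
+        hypothetical), Python compares the returned tuples element by
+        element, so a pinned direct requirement sorts ahead of a loose
+        transitive one:
+
+        >>> key_direct_pinned = (True, False, False, False, True, 1.0, 0, False, "a")
+        >>> key_transitive_loose = (True, False, True, True, True, 2.0, float("inf"), True, "b")
+        >>> key_direct_pinned < key_transitive_loose
+        True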
+        """
+        lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
+        candidate, ireqs = zip(*lookups)
+        operators = [
+            specifier.operator
+            for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
+            for specifier in specifier_set
+        ]
+
+        direct = candidate is not None
+        pinned = any(op[:2] == "==" for op in operators)
+        unfree = bool(operators)
+
+        try:
+            requested_order: Union[int, float] = self._user_requested[identifier]
+        except KeyError:
+            requested_order = math.inf
+            parent_depths = (
+                self._known_depths[parent.name] if parent is not None else 0.0
+                for _, parent in information[identifier]
+            )
+            inferred_depth = min(d for d in parent_depths) + 1.0
+        else:
+            inferred_depth = 1.0
+        self._known_depths[identifier] = inferred_depth
+
+        requested_order = self._user_requested.get(identifier, math.inf)
+
+        # Requires-Python has only one candidate and the check is basically
+        # free, so we always do it first to avoid needless work if it fails.
+        requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
+
+        # HACK: Setuptools has a very long and solid backward compatibility
+        # track record, and extremely few projects would request a narrow,
+        # non-recent version range of it since that would break a lot of things.
+        # (Most projects specify it only to request an installer feature,
+        # which does not work, but that's another topic.) Intentionally
+        # delaying Setuptools helps reduce branches the resolver has to check.
+        # This serves as a temporary fix for issues like "apache-airflow[all]"
+        # while we work on "proper" branch pruning techniques.
+        delay_this = identifier == "setuptools"
+
+        # Prefer the causes of backtracking on the assumption that the problem
+        # resolving the dependency tree is related to the failures that caused
+        # the backtracking.
+        backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
+
+        return (
+            not requires_python,
+            delay_this,
+            not direct,
+            not pinned,
+            not backtrack_cause,
+            inferred_depth,
+            requested_order,
+            not unfree,
+            identifier,
+        )
+
+    def find_matches(
+        self,
+        identifier: str,
+        requirements: Mapping[str, Iterator[Requirement]],
+        incompatibilities: Mapping[str, Iterator[Candidate]],
+    ) -> Iterable[Candidate]:
+        def _eligible_for_upgrade(identifier: str) -> bool:
+            """Are upgrades allowed for this project?
+
+            This checks the upgrade strategy, and whether the project was one
+            that the user specified on the command line, in order to decide
+            whether we should upgrade if there's a newer version available.
+
+            (Note that we don't need access to the `--upgrade` flag, because
+            an upgrade strategy of "to-satisfy-only" means that `--upgrade`
+            was not specified).
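+
+            As an illustration (editor's note; the package name is
+            hypothetical), the strategies map to these command lines:
+
+                pip install -U --upgrade-strategy eager somepkg
+                    -> every resolved project is eligible for an upgrade
+                pip install -U --upgrade-strategy only-if-needed somepkg
+                    -> only "somepkg" itself is eligible
+                pip install somepkg
+                    -> "to-satisfy-only": nothing is eligible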
+ """ + if self._upgrade_strategy == "eager": + return True + elif self._upgrade_strategy == "only-if-needed": + user_order = _get_with_identifier( + self._user_requested, + identifier, + default=None, + ) + return user_order is not None + return False + + constraint = _get_with_identifier( + self._constraints, + identifier, + default=Constraint.empty(), + ) + return self._factory.find_candidates( + identifier=identifier, + requirements=requirements, + constraint=constraint, + prefers_installed=(not _eligible_for_upgrade(identifier)), + incompatibilities=incompatibilities, + ) + + def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: + return requirement.is_satisfied_by(candidate) + + def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]: + with_requires = not self._ignore_dependencies + return [r for r in candidate.iter_dependencies(with_requires) if r is not None] + + @staticmethod + def is_backtrack_cause( + identifier: str, backtrack_causes: Sequence["PreferenceInformation"] + ) -> bool: + for backtrack_cause in backtrack_causes: + if identifier == backtrack_cause.requirement.name: + return True + if backtrack_cause.parent and identifier == backtrack_cause.parent.name: + return True + return False diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py new file mode 100644 index 000000000..6ced5329b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py @@ -0,0 +1,68 @@ +from collections import defaultdict +from logging import getLogger +from typing import Any, DefaultDict + +from pip._vendor.resolvelib.reporters import BaseReporter + +from .base import Candidate, Requirement + +logger = getLogger(__name__) + + +class PipReporter(BaseReporter): + def __init__(self) -> None: + self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int) + + self._messages_at_backtrack = { + 1: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 8: ( + "pip is looking at multiple versions of {package_name} to " + "determine which version is compatible with other " + "requirements. This could take a while." + ), + 13: ( + "This is taking longer than usual. You might need to provide " + "the dependency resolver with stricter constraints to reduce " + "runtime. See https://pip.pypa.io/warnings/backtracking for " + "guidance. If you want to abort this run, press Ctrl + C." 
+ ), + } + + def backtracking(self, candidate: Candidate) -> None: + self.backtracks_by_package[candidate.name] += 1 + + count = self.backtracks_by_package[candidate.name] + if count not in self._messages_at_backtrack: + return + + message = self._messages_at_backtrack[count] + logger.info("INFO: %s", message.format(package_name=candidate.name)) + + +class PipDebuggingReporter(BaseReporter): + """A reporter that does an info log for every event it sees.""" + + def starting(self) -> None: + logger.info("Reporter.starting()") + + def starting_round(self, index: int) -> None: + logger.info("Reporter.starting_round(%r)", index) + + def ending_round(self, index: int, state: Any) -> None: + logger.info("Reporter.ending_round(%r, state)", index) + + def ending(self, state: Any) -> None: + logger.info("Reporter.ending(%r)", state) + + def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: + logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) + + def backtracking(self, candidate: Candidate) -> None: + logger.info("Reporter.backtracking(%r)", candidate) + + def pinning(self, candidate: Candidate) -> None: + logger.info("Reporter.pinning(%r)", candidate) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/requirements.py new file mode 100644 index 000000000..f561f1f1e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -0,0 +1,166 @@ +from pip._vendor.packaging.specifiers import SpecifierSet +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pip._internal.req.req_install import InstallRequirement + +from .base import Candidate, CandidateLookup, Requirement, format_name + + +class ExplicitRequirement(Requirement): + def __init__(self, candidate: Candidate) -> None: + self.candidate = candidate + + def __str__(self) -> str: + return str(self.candidate) + + def __repr__(self) -> str: + return "{class_name}({candidate!r})".format( + class_name=self.__class__.__name__, + candidate=self.candidate, + ) + + @property + def project_name(self) -> NormalizedName: + # No need to canonicalize - the candidate did this + return self.candidate.project_name + + @property + def name(self) -> str: + # No need to canonicalize - the candidate did this + return self.candidate.name + + def format_for_error(self) -> str: + return self.candidate.format_for_error() + + def get_candidate_lookup(self) -> CandidateLookup: + return self.candidate, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return candidate == self.candidate + + +class SpecifierRequirement(Requirement): + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = ireq + self._extras = frozenset(ireq.extras) + + def __str__(self) -> str: + return str(self._ireq.req) + + def __repr__(self) -> str: + return "{class_name}({requirement!r})".format( + class_name=self.__class__.__name__, + requirement=str(self._ireq.req), + ) + + @property + def project_name(self) -> NormalizedName: + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + return canonicalize_name(self._ireq.req.name) + + @property + def name(self) -> str: + return format_name(self.project_name, self._extras) + + def format_for_error(self) -> str: + + # Convert comma-separated specifiers into "A, B, ..., F and G" + # This makes the specifier a 
bit more "human readable", without + # risking a change in meaning. (Hopefully! Not all edge cases have + # been checked) + parts = [s.strip() for s in str(self).split(",")] + if len(parts) == 0: + return "" + elif len(parts) == 1: + return parts[0] + + return ", ".join(parts[:-1]) + " and " + parts[-1] + + def get_candidate_lookup(self) -> CandidateLookup: + return None, self._ireq + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self.name, ( + f"Internal issue: Candidate is not for this requirement " + f"{candidate.name} vs {self.name}" + ) + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. + assert self._ireq.req, "Specifier-backed ireq is always PEP 508" + spec = self._ireq.req.specifier + return spec.contains(candidate.version, prereleases=True) + + +class RequiresPythonRequirement(Requirement): + """A requirement representing Requires-Python metadata.""" + + def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: + self.specifier = specifier + self._candidate = match + + def __str__(self) -> str: + return f"Python {self.specifier}" + + def __repr__(self) -> str: + return "{class_name}({specifier!r})".format( + class_name=self.__class__.__name__, + specifier=str(self.specifier), + ) + + @property + def project_name(self) -> NormalizedName: + return self._candidate.project_name + + @property + def name(self) -> str: + return self._candidate.name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + if self.specifier.contains(self._candidate.version, prereleases=True): + return self._candidate, None + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + assert candidate.name == self._candidate.name, "Not Python candidate" + # We can safely always allow prereleases here since PackageFinder + # already implements the prerelease logic, and would have filtered out + # prerelease candidates if the user does not expect them. 
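+        # Illustration (editor's note) of the prerelease behaviour with
+        # packaging's SpecifierSet:
+        #     SpecifierSet(">=3.7").contains("3.11.0rc1")                   -> False
+        #     SpecifierSet(">=3.7").contains("3.11.0rc1", prereleases=True) -> True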
+ return self.specifier.contains(candidate.version, prereleases=True) + + +class UnsatisfiableRequirement(Requirement): + """A requirement that cannot be satisfied.""" + + def __init__(self, name: NormalizedName) -> None: + self._name = name + + def __str__(self) -> str: + return f"{self._name} (unavailable)" + + def __repr__(self) -> str: + return "{class_name}({name!r})".format( + class_name=self.__class__.__name__, + name=str(self._name), + ) + + @property + def project_name(self) -> NormalizedName: + return self._name + + @property + def name(self) -> str: + return self._name + + def format_for_error(self) -> str: + return str(self) + + def get_candidate_lookup(self) -> CandidateLookup: + return None, None + + def is_satisfied_by(self, candidate: Candidate) -> bool: + return False diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py new file mode 100644 index 000000000..618f1e1ae --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -0,0 +1,292 @@ +import functools +import logging +import os +from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible +from pip._vendor.resolvelib import Resolver as RLResolver +from pip._vendor.resolvelib.structs import DirectedGraph + +from pip._internal.cache import WheelCache +from pip._internal.index.package_finder import PackageFinder +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_set import RequirementSet +from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pip._internal.resolution.resolvelib.provider import PipProvider +from pip._internal.resolution.resolvelib.reporter import ( + PipDebuggingReporter, + PipReporter, +) + +from .base import Candidate, Requirement +from .factory import Factory + +if TYPE_CHECKING: + from pip._vendor.resolvelib.resolvers import Result as RLResult + + Result = RLResult[Requirement, Candidate, str] + + +logger = logging.getLogger(__name__) + + +class Resolver(BaseResolver): + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + suppress_build_failures: bool, + py_version_info: Optional[Tuple[int, ...]] = None, + ): + super().__init__() + assert upgrade_strategy in self._allowed_strategies + + self.factory = Factory( + finder=finder, + preparer=preparer, + make_install_req=make_install_req, + wheel_cache=wheel_cache, + use_user_site=use_user_site, + force_reinstall=force_reinstall, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + suppress_build_failures=suppress_build_failures, + py_version_info=py_version_info, + ) + self.ignore_dependencies = ignore_dependencies + self.upgrade_strategy = upgrade_strategy + self._result: Optional[Result] = None + + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: + collected = 
self.factory.collect_root_requirements(root_reqs) + provider = PipProvider( + factory=self.factory, + constraints=collected.constraints, + ignore_dependencies=self.ignore_dependencies, + upgrade_strategy=self.upgrade_strategy, + user_requested=collected.user_requested, + ) + if "PIP_RESOLVER_DEBUG" in os.environ: + reporter: BaseReporter = PipDebuggingReporter() + else: + reporter = PipReporter() + resolver: RLResolver[Requirement, Candidate, str] = RLResolver( + provider, + reporter, + ) + + try: + try_to_avoid_resolution_too_deep = 2000000 + result = self._result = resolver.resolve( + collected.requirements, max_rounds=try_to_avoid_resolution_too_deep + ) + + except ResolutionImpossible as e: + error = self.factory.get_installation_error( + cast("ResolutionImpossible[Requirement, Candidate]", e), + collected.constraints, + ) + raise error from e + + req_set = RequirementSet(check_supported_wheels=check_supported_wheels) + for candidate in result.mapping.values(): + ireq = candidate.get_install_requirement() + if ireq is None: + continue + + # Check if there is already an installation under the same name, + # and set a flag for later stages to uninstall it, if needed. + installed_dist = self.factory.get_dist_to_uninstall(candidate) + if installed_dist is None: + # There is no existing installation -- nothing to uninstall. + ireq.should_reinstall = False + elif self.factory.force_reinstall: + # The --force-reinstall flag is set -- reinstall. + ireq.should_reinstall = True + elif installed_dist.version != candidate.version: + # The installation is different in version -- reinstall. + ireq.should_reinstall = True + elif candidate.is_editable or installed_dist.editable: + # The incoming distribution is editable, or different in + # editable-ness to installation -- reinstall. + ireq.should_reinstall = True + elif candidate.source_link and candidate.source_link.is_file: + # The incoming distribution is under file:// + if candidate.source_link.is_wheel: + # is a local wheel -- do nothing. + logger.info( + "%s is already installed with the same version as the " + "provided wheel. Use --force-reinstall to force an " + "installation of the wheel.", + ireq.name, + ) + continue + + # is a local sdist or path -- reinstall + ireq.should_reinstall = True + else: + continue + + link = candidate.source_link + if link and link.is_yanked: + # The reason can contain non-ASCII characters, Unicode + # is required for Python 2. + msg = ( + "The candidate selected for download or install is a " + "yanked version: {name!r} candidate (version {version} " + "at {link})\nReason for being yanked: {reason}" + ).format( + name=candidate.name, + version=candidate.version, + link=link, + reason=link.yanked_reason or "", + ) + logger.warning(msg) + + req_set.add_named_requirement(ireq) + + reqs = req_set.all_requirements + self.factory.preparer.prepare_linked_requirements_more(reqs) + return req_set + + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: + """Get order for installation of requirements in RequirementSet. + + The returned list contains a requirement before another that depends on + it. This helps ensure that the environment is kept consistent as they + get installed one-by-one. + + The current implementation creates a topological ordering of the + dependency graph, giving more weight to packages with less + or no dependencies, while breaking any cycles in the graph at + arbitrary points. 
We make no guarantees about where the cycle
+        would be broken, other than it *would* be broken.
+        """
+        assert self._result is not None, "must call resolve() first"
+
+        if not req_set.requirements:
+            # Nothing is left to install, so we do not need an order.
+            return []
+
+        graph = self._result.graph
+        weights = get_topological_weights(
+            graph,
+            expected_node_count=len(self._result.mapping) + 1,
+        )
+
+        sorted_items = sorted(
+            req_set.requirements.items(),
+            key=functools.partial(_req_set_item_sorter, weights=weights),
+            reverse=True,
+        )
+        return [ireq for _, ireq in sorted_items]
+
+
+def get_topological_weights(
+    graph: "DirectedGraph[Optional[str]]", expected_node_count: int
+) -> Dict[Optional[str], int]:
+    """Assign weights to each node based on how "deep" they are.
+
+    This implementation may change at any point in the future without prior
+    notice.
+
+    We first simplify the dependency graph by pruning any leaves and giving them
+    the highest weight: a package without any dependencies should be installed
+    first. This is done again and again in the same way, giving ever less weight
+    to the newly found leaves. The loop stops when no leaves are left: all
+    remaining packages have at least one dependency left in the graph.
+
+    Then we continue with the remaining graph, by taking the length of the
+    longest path to any node from root, ignoring any paths that contain a single
+    node twice (i.e. cycles). This is done through a depth-first search through
+    the graph, while keeping track of the path to the node.
+
+    A cycle in the graph would cause a node to be revisited while it is also
+    on its own path. In that case we take no action, which ensures we do not
+    get stuck in a loop.
+
+    When assigning weight, the longer path (i.e. larger length) is preferred.
+    """
+    path: Set[Optional[str]] = set()
+    weights: Dict[Optional[str], int] = {}
+
+    def visit(node: Optional[str]) -> None:
+        if node in path:
+            # We hit a cycle, so we'll break it here.
+            return
+
+        # Time to visit the children!
+        path.add(node)
+        for child in graph.iter_children(node):
+            visit(child)
+        path.remove(node)
+
+        last_known_parent_count = weights.get(node, 0)
+        weights[node] = max(last_known_parent_count, len(path))
+
+    # Simplify the graph, pruning leaves that have no dependencies.
+    # This is needed for large graphs (say over 200 packages) because the
+    # `visit` function becomes exponentially slow there, taking minutes.
+    # See https://github.com/pypa/pip/issues/10557
+    # We will loop until we explicitly break the loop.
+    while True:
+        leaves = set()
+        for key in graph:
+            if key is None:
+                continue
+            for _child in graph.iter_children(key):
+                # This means we have at least one child
+                break
+            else:
+                # No child.
+                leaves.add(key)
+        if not leaves:
+            # We are done simplifying.
+            break
+        # Calculate the weight for the leaves.
+        weight = len(graph) - 1
+        for leaf in leaves:
+            weights[leaf] = weight
+        # Remove the leaves from the graph, making it simpler.
+        for leaf in leaves:
+            graph.remove(leaf)
+
+    # Visit the remaining graph.
+    # `None` is guaranteed to be the root node by resolvelib.
+    visit(None)
+
+    # Sanity checks
+    assert weights[None] == 0
+    assert len(weights) == expected_node_count
+
+    return weights
+
+
+def _req_set_item_sorter(
+    item: Tuple[str, InstallRequirement],
+    weights: Dict[Optional[str], int],
+) -> Tuple[int, str]:
+    """Key function used to sort install requirements for installation.
+
+    Based on the "weight" mapping calculated in ``get_topological_weights()``.
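+
+    A sketch with hypothetical weights (editor's illustration): leaves get
+    the largest weights, and they are installed first because the caller
+    sorts with ``reverse=True``:
+
+    >>> weights = {"a": 2, "b": 0}
+    >>> sorted(["b", "a"], key=lambda name: (weights[name], name), reverse=True)
+    ['a', 'b']
+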
+ The canonical package name is returned as the second member as a tie- + breaker to ensure the result is predictable, which is useful in tests. + """ + name = canonicalize_name(item[0]) + return weights[name], name diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/self_outdated_check.py b/myenv/lib/python3.10/site-packages/pip/_internal/self_outdated_check.py new file mode 100644 index 000000000..7300e0ea4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/self_outdated_check.py @@ -0,0 +1,189 @@ +import datetime +import hashlib +import json +import logging +import optparse +import os.path +import sys +from typing import Any, Dict + +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.index.collector import LinkCollector +from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import get_default_environment +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.session import PipSession +from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace +from pip._internal.utils.misc import ensure_dir + +SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" + + +logger = logging.getLogger(__name__) + + +def _get_statefile_name(key: str) -> str: + key_bytes = key.encode() + name = hashlib.sha224(key_bytes).hexdigest() + return name + + +class SelfCheckState: + def __init__(self, cache_dir: str) -> None: + self.state: Dict[str, Any] = {} + self.statefile_path = None + + # Try to load the existing state + if cache_dir: + self.statefile_path = os.path.join( + cache_dir, "selfcheck", _get_statefile_name(self.key) + ) + try: + with open(self.statefile_path, encoding="utf-8") as statefile: + self.state = json.load(statefile) + except (OSError, ValueError, KeyError): + # Explicitly suppressing exceptions, since we don't want to + # error out if the cache file is invalid. + pass + + @property + def key(self) -> str: + return sys.prefix + + def save(self, pypi_version: str, current_time: datetime.datetime) -> None: + # If we do not have a path to cache in, don't bother saving. + if not self.statefile_path: + return + + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self.statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. + ensure_dir(os.path.dirname(self.statefile_path)) + + state = { + # Include the key so it's easy to tell which pip wrote the + # file. + "key": self.key, + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + text = json.dumps(state, sort_keys=True, separators=(",", ":")) + + with adjacent_tmp_file(self.statefile_path) as f: + f.write(text.encode()) + + try: + # Since we have a prefix-specific state file, we can just + # overwrite whatever is there, no need to check. + replace(f.name, self.statefile_path) + except OSError: + # Best effort. + pass + + +def was_installed_by_pip(pkg: str) -> bool: + """Checks whether pkg was installed by pip + + This is used not to display the upgrade message when pip is in fact + installed by system package manager, such as dnf on Fedora. + """ + dist = get_default_environment().get_distribution(pkg) + return dist is not None and "pip" == dist.installer + + +def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: + """Check for an update for pip. 
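+
+    The cached state written by ``SelfCheckState.save`` above is a small JSON
+    document shaped roughly like this (editor's illustration; values made up):
+
+        {"key": "/usr/local", "last_check": "2022-01-01T00:00:00Z",
+         "pypi_version": "21.3.1"}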
+ + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_dist = get_default_environment().get_distribution("pip") + if not installed_dist: + return + + pip_version = installed_dist.version + pypi_version = None + + try: + state = SelfCheckState(cache_dir=options.cache_dir) + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], SELFCHECK_DATE_FMT + ) + if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + # Lets use PackageFinder to see what the latest pip version is + link_collector = LinkCollector.create( + session, + options=options, + suppress_no_index=True, + ) + + # Pass allow_yanked=False so we don't suggest upgrading to a + # yanked version. + selection_prefs = SelectionPreferences( + allow_yanked=False, + allow_all_prereleases=False, # Explicitly set to False + ) + + finder = PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + use_deprecated_html5lib=( + "html5lib" in options.deprecated_features_enabled + ), + ) + best_candidate = finder.find_best_candidate("pip").best_candidate + if best_candidate is None: + return + pypi_version = str(best_candidate.version) + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = parse_version(pypi_version) + + local_version_is_older = ( + pip_version < remote_version + and pip_version.base_version != remote_version.base_version + and was_installed_by_pip("pip") + ) + + # Determine if our pypi_version is older + if not local_version_is_older: + return + + # We cannot tell how the current pip is available in the current + # command context, so be pragmatic here and suggest the command + # that's always available. This does not accommodate spaces in + # `sys.executable` on purpose as it is not possible to do it + # correctly without knowing the user's shell. Thus, + # it won't be done until possible through the standard library. + # Do not be tempted to use the undocumented subprocess.list2cmdline. + # It is considered an internal implementation detail for a reason. 
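+        # For example (editor's note; the interpreter path is illustrative):
+        # with sys.executable == "/usr/bin/python3", the suggested command
+        # becomes "/usr/bin/python3 -m pip install --upgrade pip".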
+ pip_cmd = f"{sys.executable} -m pip" + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, + pypi_version, + pip_cmd, + ) + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..35cdadfe5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc new file mode 100644 index 000000000..96d1c7713 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc new file mode 100644 index 000000000..70cb73aa7 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc new file mode 100644 index 000000000..5a1ceb2d2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc new file mode 100644 index 000000000..a93782f83 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-310.pyc new file mode 100644 index 000000000..977d2c3b0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc new file mode 100644 index 000000000..63968bf7e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc new file mode 100644 index 000000000..00b49612b Binary files /dev/null and 
b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc new file mode 100644 index 000000000..adcab7525 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc new file mode 100644 index 000000000..89fdd99a4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc new file mode 100644 index 000000000..82957c58a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc new file mode 100644 index 000000000..06d73c0ab Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc new file mode 100644 index 000000000..6bf724d59 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc new file mode 100644 index 000000000..de2e2ffc9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc new file mode 100644 index 000000000..6ad45dbab Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-310.pyc new file mode 100644 index 000000000..fa1e09840 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc new file mode 100644 index 000000000..6e79904fd Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-310.pyc differ diff 
--git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc new file mode 100644 index 000000000..7ce758a35 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/misc.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/misc.cpython-310.pyc new file mode 100644 index 000000000..dc99eeaf3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/misc.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/models.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/models.cpython-310.pyc new file mode 100644 index 000000000..4cdabb558 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/models.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc new file mode 100644 index 000000000..9df6da07a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc new file mode 100644 index 000000000..17bbf1751 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc new file mode 100644 index 000000000..ffa49a835 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc new file mode 100644 index 000000000..aef4ad532 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc new file mode 100644 index 000000000..fccc1d324 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/urls.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/urls.cpython-310.pyc new file mode 100644 index 000000000..e769413d2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/urls.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc
new file mode 100644
index 000000000..f5d45ddd0
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 000000000..59e8828c2
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/_log.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/_log.py
new file mode 100644
index 000000000..92c4c6a19
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/_log.py
@@ -0,0 +1,38 @@
+"""Customize logging
+
+Defines custom logger class for the `logger.verbose(...)` method.
+
+init_logging() must be called before any other modules that call logging.getLogger.
+"""
+
+import logging
+from typing import Any, cast
+
+# custom log level for `--verbose` output
+# between DEBUG and INFO
+VERBOSE = 15
+
+
+class VerboseLogger(logging.Logger):
+    """Custom Logger, defining a verbose log-level
+
+    VERBOSE is between INFO and DEBUG.
+    """
+
+    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
+        return self.log(VERBOSE, msg, *args, **kwargs)
+
+
+def getLogger(name: str) -> VerboseLogger:
+    """logging.getLogger, but ensures our VerboseLogger class is returned"""
+    return cast(VerboseLogger, logging.getLogger(name))
+
+
+def init_logging() -> None:
+    """Register our VerboseLogger and VERBOSE log level.
+
+    Should be called before any calls to getLogger(),
+    i.e. in pip._internal.__init__
+    """
+    logging.setLoggerClass(VerboseLogger)
+    logging.addLevelName(VERBOSE, "VERBOSE")
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 000000000..16933bf8a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,52 @@
+"""
+This code wraps the vendored appdirs module so that the return values are
+compatible with the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests
+passing, and eventually drop this after all usages are changed.
+"""
+
+import os
+import sys
+from typing import List
+
+from pip._vendor import platformdirs as _appdirs
+
+
+def user_cache_dir(appname: str) -> str:
+    return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
+    # Use ~/Application Support/pip, if the directory exists.
+    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+    if os.path.isdir(path):
+        return path
+
+    # Use a Linux-like ~/.config/pip, by default.
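+    # For illustration (editor's note; the home directory is hypothetical):
+    # after os.path.expanduser below, "~/.config/pip" becomes something like
+    # "/Users/alice/.config/pip".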
+ linux_like_path = "~/.config/" + if appname: + linux_like_path = os.path.join(linux_like_path, appname) + + return os.path.expanduser(linux_like_path) + + +def user_config_dir(appname: str, roaming: bool = True) -> str: + if sys.platform == "darwin": + return _macos_user_config_dir(appname, roaming) + + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + + +# for the discussion regarding site_config_dir locations +# see +def site_config_dirs(appname: str) -> List[str]: + if sys.platform == "darwin": + return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)] + + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if sys.platform == "win32": + return [dirval] + + # Unix-y system. Look in /etc as well. + return dirval.split(os.pathsep) + ["/etc"] diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/compat.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 000000000..3f4d300ce --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,63 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +import logging +import os +import sys + +__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"] + + +logger = logging.getLogger(__name__) + + +def has_tls() -> bool: + try: + import _ssl # noqa: F401 # ignore unused + + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + + return IS_PYOPENSSL + + +def get_path_uid(path: str) -> int: + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, "O_NOFOLLOW"): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + return file_uid + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py new file mode 100644 index 000000000..b6ed9a78e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py @@ -0,0 +1,165 @@ +"""Generate and work with PEP 425 Compatibility Tags. 
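+
+A tag is an ``interpreter-abi-platform`` triple (editor's note; the tag below
+is illustrative, built with the vendored packaging library):
+
+>>> from pip._vendor.packaging.tags import Tag
+>>> str(Tag("cp310", "cp310", "manylinux2014_x86_64"))
+'cp310-cp310-manylinux2014_x86_64'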
+""" + +import re +from typing import List, Optional, Tuple + +from pip._vendor.packaging.tags import ( + PythonVersion, + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") + + +def version_info_to_nodot(version_info: Tuple[int, ...]) -> str: + # Only use up to the first two numbers. + return "".join(map(str, version_info[:2])) + + +def _mac_platforms(arch: str) -> List[str]: + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + "{}_{}".format(name, arch[len("macosx_") :]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch: str) -> List[str]: + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch_prefix == "manylinux2014": + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {"i686", "x86_64"}: + arches.append("manylinux2010" + arch_sep + arch_suffix) + arches.append("manylinux1" + arch_sep + arch_suffix) + elif arch_prefix == "manylinux2010": + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append("manylinux1" + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch: str) -> List[str]: + arch_prefix, arch_sep, arch_suffix = arch.partition("_") + if arch.startswith("macosx"): + arches = _mac_platforms(arch) + elif arch_prefix in ["manylinux2014", "manylinux2010"]: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]: + if not platforms: + return None + + seen = set() + result = [] + + for p in platforms: + if p in seen: + continue + additions = [c for c in _get_custom_platforms(p) if c not in seen] + seen.update(additions) + result.extend(additions) + + return result + + +def _get_python_version(version: str) -> PythonVersion: + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter( + implementation: Optional[str] = None, version: Optional[str] = None +) -> str: + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return f"{implementation}{version}" + + +def get_supported( + version: Optional[str] = None, + platforms: Optional[List[str]] = None, + impl: Optional[str] = None, + abis: Optional[List[str]] = None, +) -> List[Tag]: + """Return a list of supported tags for each version specified in + `versions`. 
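+
+    For example (editor's sketch; the exact output depends on the local
+    interpreter), ``get_supported(version="310",
+    platforms=["manylinux2014_x86_64"])`` starts with specific tags such as
+    ``cp310-cp310-manylinux2014_x86_64`` and ends with broad compatibility
+    tags such as ``py30-none-any``.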
+
+    :param version: a string version, of the form "33" or "32",
+        or None. The version will be assumed to support our ABI.
+    :param platforms: specify a list of platforms you want valid
+        tags for, or None. If None, use the local system platform.
+    :param impl: specify the exact implementation you want valid
+        tags for, or None. If None, use the local interpreter impl.
+    :param abis: specify a list of abis you want valid
+        tags for, or None. If None, use the local interpreter abi.
+    """
+    supported: List[Tag] = []
+
+    python_version: Optional[PythonVersion] = None
+    if version is not None:
+        python_version = _get_python_version(version)
+
+    interpreter = _get_custom_interpreter(impl, version)
+
+    platforms = _expand_allowed_platforms(platforms)
+
+    is_cpython = (impl or interpreter_name()) == "cp"
+    if is_cpython:
+        supported.extend(
+            cpython_tags(
+                python_version=python_version,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    else:
+        supported.extend(
+            generic_tags(
+                interpreter=interpreter,
+                abis=abis,
+                platforms=platforms,
+            )
+        )
+    supported.extend(
+        compatible_tags(
+            python_version=python_version,
+            interpreter=interpreter,
+            platforms=platforms,
+        )
+    )
+
+    return supported
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/datetime.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/datetime.py
new file mode 100644
index 000000000..8668b3b0e
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/datetime.py
@@ -0,0 +1,11 @@
+"""For when pip wants to check the date or time.
+"""
+
+import datetime
+
+
+def today_is_later_than(year: int, month: int, day: int) -> bool:
+    today = datetime.date.today()
+    given = datetime.date(year, month, day)
+
+    return today > given
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 000000000..72bd6f25a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,120 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+import logging
+import warnings
+from typing import Any, Optional, TextIO, Type, Union
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version  # NOTE: tests patch this name.
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+    pass
+
+
+_original_showwarning: Any = None
+
+
+# Warnings <-> Logging Integration
+def _showwarning(
+    message: Union[Warning, str],
+    category: Type[Warning],
+    filename: str,
+    lineno: int,
+    file: Optional[TextIO] = None,
+    line: Optional[str] = None,
+) -> None:
+    if file is not None:
+        if _original_showwarning is not None:
+            _original_showwarning(message, category, filename, lineno, file, line)
+    elif issubclass(category, PipDeprecationWarning):
+        # We use a specially named logger which will handle all of the
+        # deprecation messages for pip.
+        logger = logging.getLogger("pip._internal.deprecations")
+        logger.warning(message)
+    else:
+        _original_showwarning(message, category, filename, lineno, file, line)
+
+
+def install_warning_logger() -> None:
+    # Enable our Deprecation Warnings
+    warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+    global _original_showwarning
+
+    if _original_showwarning is None:
+        _original_showwarning = warnings.showwarning
+        warnings.showwarning = _showwarning
+
+
+def deprecated(
+    *,
+    reason: str,
+    replacement: Optional[str],
+    gone_in: Optional[str],
+    feature_flag: Optional[str] = None,
+    issue: Optional[int] = None,
+) -> None:
+    """Helper to deprecate existing functionality.
+
+    reason:
+        Textual reason shown to the user about why this functionality has
+        been deprecated. Should be a complete sentence.
+    replacement:
+        Textual suggestion shown to the user about what alternative
+        functionality they can use.
+    gone_in:
+        The version of pip in which this functionality should be removed.
+        Raises an error if pip's current version is greater than or equal to
+        this.
+    feature_flag:
+        Command-line flag of the form --use-feature={feature_flag} for testing
+        upcoming functionality.
+    issue:
+        Issue number on the tracker that would serve as a useful place for
+        users to find related discussion and provide feedback.
+    """
+
+    # Determine whether or not the feature is already gone in this version.
+    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
+
+    message_parts = [
+        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
+        (
+            gone_in,
+            "pip {} will enforce this behaviour change."
+            if not is_gone
+            else "Since pip {}, this is no longer supported.",
+        ),
+        (
+            replacement,
+            "A possible replacement is {}.",
+        ),
+        (
+            feature_flag,
+            "You can use the flag --use-feature={} to test the upcoming behaviour."
+            if not is_gone
+            else None,
+        ),
+        (
+            issue,
+            "Discussion can be found at https://github.com/pypa/pip/issues/{}",
+        ),
+    ]
+
+    message = " ".join(
+        format_str.format(value)
+        for value, format_str in message_parts
+        if format_str is not None and value is not None
+    )
+
+    # Raise as an error if this behaviour is deprecated.
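+    # PipDeprecationWarning doubles as the exception type below, so callers
+    # can handle "deprecated" and "removed" uniformly.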
+ if is_gone: + raise PipDeprecationWarning(message) + + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py new file mode 100644 index 000000000..0e8e5e160 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -0,0 +1,87 @@ +from typing import Optional + +from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo +from pip._internal.models.link import Link +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + + +def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str: + """Convert a DirectUrl to a pip requirement string.""" + direct_url.validate() # if invalid, this is a pip bug + requirement = name + " @ " + fragments = [] + if isinstance(direct_url.info, VcsInfo): + requirement += "{}+{}@{}".format( + direct_url.info.vcs, direct_url.url, direct_url.info.commit_id + ) + elif isinstance(direct_url.info, ArchiveInfo): + requirement += direct_url.url + if direct_url.info.hash: + fragments.append(direct_url.info.hash) + else: + assert isinstance(direct_url.info, DirInfo) + requirement += direct_url.url + if direct_url.subdirectory: + fragments.append("subdirectory=" + direct_url.subdirectory) + if fragments: + requirement += "#" + "&".join(fragments) + return requirement + + +def direct_url_for_editable(source_dir: str) -> DirectUrl: + return DirectUrl( + url=path_to_url(source_dir), + info=DirInfo(editable=True), + ) + + +def direct_url_from_link( + link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False +) -> DirectUrl: + if link.is_vcs: + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend + url, requested_revision, _ = vcs_backend.get_url_rev_and_auth( + link.url_without_fragment + ) + # For VCS links, we need to find out and add commit_id. + if link_is_in_wheel_cache: + # If the requested VCS link corresponds to a cached + # wheel, it means the requested revision was an + # immutable commit hash, otherwise it would not have + # been cached. In that case we don't have a source_dir + # with the VCS checkout. + assert requested_revision + commit_id = requested_revision + else: + # If the wheel was not in cache, it means we have + # had to checkout from VCS to build and we have a source_dir + # which we can inspect to find out the commit id. 
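+            # A local checkout must exist here; its VCS metadata is the only
+            # remaining source for the commit id.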
+ assert source_dir + commit_id = vcs_backend.get_revision(source_dir) + return DirectUrl( + url=url, + info=VcsInfo( + vcs=vcs_backend.name, + commit_id=commit_id, + requested_revision=requested_revision, + ), + subdirectory=link.subdirectory_fragment, + ) + elif link.is_existing_dir(): + return DirectUrl( + url=link.url_without_fragment, + info=DirInfo(), + subdirectory=link.subdirectory_fragment, + ) + else: + hash = None + hash_name = link.hash_name + if hash_name: + hash = f"{hash_name}={link.hash}" + return DirectUrl( + url=link.url_without_fragment, + info=ArchiveInfo(hash=hash), + subdirectory=link.subdirectory_fragment, + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/distutils_args.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 000000000..e4aa5b827 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,42 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt +from typing import Dict, List + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args: List[str]) -> Dict[str, str]: + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py new file mode 100644 index 000000000..9e0da8d2d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py @@ -0,0 +1,75 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import re +import sys +from typing import Optional + +from pip._internal.locations import site_packages, user_site +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) + +__all__ = [ + "egg_link_path_from_sys_path", + "egg_link_path_from_location", +] + + +def _egg_link_name(raw_name: str) -> str: + """ + Convert a Name metadata value to a .egg-link name, by applying + the same substitution as pkg_resources's safe_name function. + Note: we cannot use canonicalize_name because it has a different logic. + """ + return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link" + + +def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]: + """ + Look for a .egg-link file for project name, by walking sys.path. 
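+    Return the first match, or None if no .egg-link file is found.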
+    """
+    egg_link_name = _egg_link_name(raw_name)
+    for path_item in sys.path:
+        egg_link = os.path.join(path_item, egg_link_name)
+        if os.path.isfile(egg_link):
+            return egg_link
+    return None
+
+
+def egg_link_path_from_location(raw_name: str) -> Optional[str]:
+    """
+    Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are three scenarios:
+    1) not in a virtualenv
+       try to find in site.USER_SITE, then site_packages
+    2) in a no-global virtualenv
+       try to find in site_packages
+    3) in a yes-global virtualenv
+       try to find in site_packages, then site.USER_SITE
+       (don't look in global location)
+
+    For #1 and #3, there could be odd cases, where there's an egg-link in 2
+    locations.
+
+    This method will just return the first one found.
+    """
+    sites = []
+    if running_under_virtualenv():
+        sites.append(site_packages)
+        if not virtualenv_no_global() and user_site:
+            sites.append(user_site)
+    else:
+        if user_site:
+            sites.append(user_site)
+        sites.append(site_packages)
+
+    egg_link_name = _egg_link_name(raw_name)
+    for site in sites:
+        egglink = os.path.join(site, egg_link_name)
+        if os.path.isfile(egglink):
+            return egglink
+    return None
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/encoding.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 000000000..1c73f6c9a
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,36 @@
+import codecs
+import locale
+import re
+import sys
+from typing import List, Tuple
+
+BOMS: List[Tuple[bytes, str]] = [
+    (codecs.BOM_UTF8, "utf-8"),
+    (codecs.BOM_UTF16, "utf-16"),
+    (codecs.BOM_UTF16_BE, "utf-16-be"),
+    (codecs.BOM_UTF16_LE, "utf-16-le"),
+    (codecs.BOM_UTF32, "utf-32"),
+    (codecs.BOM_UTF32_BE, "utf-32-be"),
+    (codecs.BOM_UTF32_LE, "utf-32-le"),
+]
+
+ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)")
+
+
+def auto_decode(data: bytes) -> str:
+    """Check a bytes string for a BOM to correctly detect the encoding
+
+    Fall back to locale.getpreferredencoding(False), like open() does on Python 3."""
+    for bom, encoding in BOMS:
+        if data.startswith(bom):
+            return data[len(bom) :].decode(encoding)
+    # Let's check the first two lines, as in PEP 263
+    for line in data.split(b"\n")[:2]:
+        if line[0:1] == b"#" and ENCODING_RE.search(line):
+            result = ENCODING_RE.search(line)
+            assert result is not None
+            encoding = result.groups()[0].decode("ascii")
+            return data.decode(encoding)
+    return data.decode(
+        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+    )
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/entrypoints.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/entrypoints.py
new file mode 100644
index 000000000..1504a1291
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/entrypoints.py
@@ -0,0 +1,27 @@
+import sys
+from typing import List, Optional
+
+from pip._internal.cli.main import main
+
+
+def _wrapper(args: Optional[List[str]] = None) -> int:
+    """Central wrapper for all old entrypoints.
+
+    Historically pip has had several entrypoints defined. Because of issues
+    arising from PATH, sys.path, multiple Pythons, their interactions, and most
+    of them having a pip installed, users suffer every time an entrypoint gets
+    moved.
+
+    To alleviate this pain, and provide a mechanism for warning users and
+    directing them to an appropriate place for help, we now define all of
+    our old entrypoints as wrappers for the current one.
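+    Each wrapper warns on stderr and then delegates to the current main().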
+ """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/filesystem.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 000000000..b7e6191ab --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,182 @@ +import fnmatch +import os +import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile +from typing import Any, BinaryIO, Iterator, List, Union, cast + +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.misc import format_size + + +def check_path_owner(path: str) -> bool: + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +def copy2_fixed(src: str, dest: str) -> None: + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except OSError: + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError(f"`{f}` is a socket") + + raise + + +def is_socket(path: str) -> bool: + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: + """Return a file-like object pointing to a tmp file next to path. + + The file is created securely and is ensured to be written to disk + after the context reaches its end. + + kwargs will be passed to tempfile.NamedTemporaryFile to control + the way the temporary file will be opened. + """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix=".tmp", + **kwargs, + ) as f: + result = cast(BinaryIO, f) + try: + yield result + finally: + result.flush() + os.fsync(result.fileno()) + + +# Tenacity raises RetryError by default, explicitly raise the original exception +_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)) + +replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. 
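+# On POSIX the check below uses os.access(); on Windows it creates and
+# removes a real probe file, since os.access() is unreliable there.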
+def test_writable_dir(path: str) -> bool: + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. + while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == "posix": + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path: str) -> bool: + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = "accesstest_deleteme_fishfingers_custard_" + alphabet = "abcdefghijklmnopqrstuvwxyz0123456789" + for _ in range(10): + name = basename + "".join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except FileExistsError: + pass + except PermissionError: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + # This could as well be because the parent dir is not readable, + # due to non-privileged user access. + return False + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise OSError("Unexpected condition testing for writable directory") + + +def find_files(path: str, pattern: str) -> List[str]: + """Returns a list of absolute paths of files beneath path, recursively, + with filenames which match the UNIX-style shell glob pattern.""" + result: List[str] = [] + for root, _, files in os.walk(path): + matches = fnmatch.filter(files, pattern) + result.extend(os.path.join(root, f) for f in matches) + return result + + +def file_size(path: str) -> Union[int, float]: + # If it's a symlink, return 0. + if os.path.islink(path): + return 0 + return os.path.getsize(path) + + +def format_file_size(path: str) -> str: + return format_size(file_size(path)) + + +def directory_size(path: str) -> Union[int, float]: + size = 0.0 + for root, _dirs, files in os.walk(path): + for filename in files: + file_path = os.path.join(root, filename) + size += file_size(file_path) + return size + + +def format_directory_size(path: str) -> str: + return format_size(directory_size(path)) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/filetypes.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 000000000..594857017 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,27 @@ +"""Filetype information. +""" + +from typing import Tuple + +from pip._internal.utils.misc import splitext + +WHEEL_EXTENSION = ".whl" +BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz") +XZ_EXTENSIONS: Tuple[str, ...] = ( + ".tar.xz", + ".txz", + ".tlz", + ".tar.lz", + ".tar.lzma", +) +ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION) +TAR_EXTENSIONS: Tuple[str, ...] 
= (".tar.gz", ".tgz", ".tar") +ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS + + +def is_archive_file(name: str) -> bool: + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/glibc.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 000000000..7bd3c2068 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,88 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import os +import sys +from typing import Optional, Tuple + + +def glibc_version_string() -> Optional[str]: + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr() -> Optional[str]: + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes() -> Optional[str]: + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver() -> Tuple[str, str]: + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. 
+    """
+    glibc_version = glibc_version_string()
+    if glibc_version is None:
+        return ("", "")
+    else:
+        return ("glibc", glibc_version)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/hashes.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 000000000..82eb035a0
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,144 @@
+import hashlib
+from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List, Optional
+
+from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
+from pip._internal.utils.misc import read_chunks
+
+if TYPE_CHECKING:
+    from hashlib import _Hash
+
+    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
+    # pip compatibility with older patch versions of Python 3.6
+    from typing import NoReturn
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = "sha256"
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ["sha256", "sha384", "sha512"]
+
+
+class Hashes:
+    """A wrapper that builds multiple hashes at once and checks them against
+    known-good values.
+
+    """
+
+    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
+        """
+        :param hashes: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        """
+        allowed = {}
+        if hashes is not None:
+            for alg, keys in hashes.items():
+                # Make sure values are always sorted (to ease equality checks)
+                allowed[alg] = sorted(keys)
+        self._allowed = allowed
+
+    def __and__(self, other: "Hashes") -> "Hashes":
+        if not isinstance(other, Hashes):
+            return NotImplemented
+
+        # If either of the Hashes objects is entirely empty (i.e. no hash
+        # specified at all), all hashes from the other object are allowed.
+        if not other:
+            return self
+        if not self:
+            return other
+
+        # Otherwise only hashes that are present in both objects are allowed.
+        new = {}
+        for alg, values in other._allowed.items():
+            if alg not in self._allowed:
+                continue
+            new[alg] = [v for v in values if v in self._allowed[alg]]
+        return Hashes(new)
+
+    @property
+    def digest_count(self) -> int:
+        return sum(len(digests) for digests in self._allowed.values())
+
+    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
+        """Return whether the given hex digest is allowed."""
+        return hex_digest in self._allowed.get(hash_name, [])
+
+    def check_against_chunks(self, chunks: Iterator[bytes]) -> None:
+        """Check good hashes against ones built from iterable of chunks of
+        data.
+
+        Raise HashMismatch if none match.
+
+        """
+        gots = {}
+        for hash_name in self._allowed.keys():
+            try:
+                gots[hash_name] = hashlib.new(hash_name)
+            except (ValueError, TypeError):
+                raise InstallationError(f"Unknown hash name: {hash_name}")
+
+        for chunk in chunks:
+            for hash in gots.values():
+                hash.update(chunk)
+
+        for hash_name, got in gots.items():
+            if got.hexdigest() in self._allowed[hash_name]:
+                return
+        self._raise(gots)
+
+    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
+        raise HashMismatch(self._allowed, gots)
+
+    def check_against_file(self, file: BinaryIO) -> None:
+        """Check good hashes against a file-like object
+
+        Raise HashMismatch if none match.
+ + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path: str) -> None: + with open(path, "rb") as file: + return self.check_against_file(file) + + def __bool__(self) -> bool: + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Hashes): + return NotImplemented + return self._allowed == other._allowed + + def __hash__(self) -> int: + return hash( + ",".join( + sorted( + ":".join((alg, digest)) + for alg, digest_list in self._allowed.items() + for digest in digest_list + ) + ) + ) + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + + def __init__(self) -> None: + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super().__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn": + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/inject_securetransport.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 000000000..276aa79bb --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,35 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. + +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. 
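+If the linked OpenSSL is already 1.0.1 or newer, injection is skipped.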
+""" + +import sys + + +def inject_securetransport() -> None: + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/logging.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 000000000..6e001c5d6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,343 @@ +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +import threading +from dataclasses import dataclass +from logging import Filter +from typing import IO, Any, ClassVar, Iterator, List, Optional, TextIO, Type + +from pip._vendor.rich.console import ( + Console, + ConsoleOptions, + ConsoleRenderable, + RenderResult, +) +from pip._vendor.rich.highlighter import NullHighlighter +from pip._vendor.rich.logging import RichHandler +from pip._vendor.rich.segment import Segment +from pip._vendor.rich.style import Style + +from pip._internal.exceptions import DiagnosticPipError +from pip._internal.utils._log import VERBOSE, getLogger +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +_log_state = threading.local() +subprocess_logger = getLogger("pip.subprocessor") + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. + """ + + +def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool: + if exc_class is BrokenPipeError: + return True + + # On Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if not WINDOWS: + return False + + return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE) + + +@contextlib.contextmanager +def indent_log(num: int = 2) -> Iterator[None]: + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + # For thread-safety + _log_state.indentation = get_indentation() + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation() -> int: + return getattr(_log_state, "indentation", 0) + + +class IndentingFormatter(logging.Formatter): + default_time_format = "%Y-%m-%dT%H:%M:%S" + + def __init__( + self, + *args: Any, + add_timestamp: bool = False, + **kwargs: Any, + ) -> None: + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. + """ + self.add_timestamp = add_timestamp + super().__init__(*args, **kwargs) + + def get_message_start(self, formatted: str, levelno: int) -> str: + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return "" + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." 
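+            # The deprecation prefix already conveys the severity, so no
+            # additional "WARNING: " prefix is added here.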
+ return "" + if levelno < logging.ERROR: + return "WARNING: " + + return "ERROR: " + + def format(self, record: logging.LogRecord) -> str: + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super().format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = "" + if self.add_timestamp: + prefix = f"{self.formatTime(record)} " + prefix += " " * get_indentation() + formatted = "".join([prefix + line for line in formatted.splitlines(True)]) + return formatted + + +@dataclass +class IndentedRenderable: + renderable: ConsoleRenderable + indent: int + + def __rich_console__( + self, console: Console, options: ConsoleOptions + ) -> RenderResult: + segments = console.render(self.renderable, options) + lines = Segment.split_lines(segments) + for line in lines: + yield Segment(" " * self.indent) + yield from line + yield Segment("\n") + + +class RichPipStreamHandler(RichHandler): + KEYWORDS: ClassVar[Optional[List[str]]] = [] + + def __init__(self, stream: Optional[TextIO], no_color: bool) -> None: + super().__init__( + console=Console(file=stream, no_color=no_color, soft_wrap=True), + show_time=False, + show_level=False, + show_path=False, + highlighter=NullHighlighter(), + ) + + # Our custom override on Rich's logger, to make things work as we need them to. + def emit(self, record: logging.LogRecord) -> None: + style: Optional[Style] = None + + # If we are given a diagnostic error to present, present it with indentation. + if record.msg == "[present-diagnostic] %s" and len(record.args) == 1: + diagnostic_error: DiagnosticPipError = record.args[0] # type: ignore[index] + assert isinstance(diagnostic_error, DiagnosticPipError) + + renderable: ConsoleRenderable = IndentedRenderable( + diagnostic_error, indent=get_indentation() + ) + else: + message = self.format(record) + renderable = self.render_message(record, message) + if record.levelno is not None: + if record.levelno >= logging.ERROR: + style = Style(color="red") + elif record.levelno >= logging.WARNING: + style = Style(color="yellow") + + try: + self.console.print(renderable, overflow="ignore", crop=False, style=style) + except Exception: + self.handleError(record) + + def handleError(self, record: logging.LogRecord) -> None: + """Called when logging is unable to log some output.""" + + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if ( + exc_class + and exc + and self.console.file is sys.stdout + and _is_broken_pipe_error(exc_class, exc) + ): + raise BrokenStdoutLoggingError() + + return super().handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + def _open(self) -> IO[Any]: + ensure_dir(os.path.dirname(self.baseFilename)) + return super()._open() + + +class MaxLevelFilter(Filter): + def __init__(self, level: int) -> None: + self.level = level + + def filter(self, record: logging.LogRecord) -> bool: + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record: logging.LogRecord) -> bool: + # The base Filter class allows only records from a logger (or its + # children). 
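+        # Inverting the base class's result turns "allow only this logger"
+        # into "exclude this logger".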
+ return not super().filter(record) + + +def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int: + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 2: + level_number = logging.DEBUG + elif verbosity == 1: + level_number = VERBOSE + elif verbosity == -1: + level_number = logging.WARNING + elif verbosity == -2: + level_number = logging.ERROR + elif verbosity <= -3: + level_number = logging.CRITICAL + else: + level_number = logging.INFO + + level = logging.getLevelName(level_number) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. + vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.RichPipStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + + logging.config.dictConfig( + { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. 
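+            # It mirrors the "console" handler but writes to stderr, keeping
+            # subprocess output off stdout.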
+ "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "stream": log_streams["stderr"], + "no_color": no_color, + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "encoding": "utf-8", + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": {"pip._vendor": {"level": vendored_log_level}}, + } + ) + + return level_number diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/misc.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 000000000..0bf9e99af --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,653 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import contextlib +import errno +import getpass +import hashlib +import io +import logging +import os +import posixpath +import shutil +import stat +import sys +import urllib.parse +from io import StringIO +from itertools import filterfalse, tee, zip_longest +from types import TracebackType +from typing import ( + Any, + BinaryIO, + Callable, + ContextManager, + Iterable, + Iterator, + List, + Optional, + TextIO, + Tuple, + Type, + TypeVar, + cast, +) + +from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed + +from pip import __version__ +from pip._internal.exceptions import CommandError +from pip._internal.locations import get_major_minor_version +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.virtualenv import running_under_virtualenv + +__all__ = [ + "rmtree", + "display_path", + "backup_dir", + "ask", + "splitext", + "format_size", + "is_installable_dir", + "normalize_path", + "renames", + "get_prog", + "captured_stdout", + "ensure_dir", + "remove_auth_from_url", +] + + +logger = logging.getLogger(__name__) + +T = TypeVar("T") +ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] +VersionInfo = Tuple[int, int, int] +NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] + + +def get_pip_version() -> str: + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return "pip {} from {} (python {})".format( + __version__, + pip_pkg_dir, + get_major_minor_version(), + ) + + +def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]: + """ + Convert a tuple of ints representing a Python version to one of length + three. + + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. + + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). + """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] + + return cast("VersionInfo", py_version_info) + + +def ensure_dir(path: str) -> None: + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + # Windows can raise spurious ENOTEMPTY errors. See #6426. 
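+        # Both errno values indicate that the directory already exists,
+        # which is the outcome ensure_dir() wants.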
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + raise + + +def get_prog() -> str: + try: + prog = os.path.basename(sys.argv[0]) + if prog in ("__main__.py", "-c"): + return f"{sys.executable} -m pip" + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return "pip" + + +# Retry every half second for up to 3 seconds +# Tenacity raises RetryError by default, explicitly raise the original exception +@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) +def rmtree(dir: str, ignore_errors: bool = False) -> None: + shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None: + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except OSError: + # it's equivalent to os.path.exists + return + + if has_attr_readonly: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def display_path(path: str) -> str: + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if path.startswith(os.getcwd() + os.path.sep): + path = "." + path[len(os.getcwd()) :] + return path + + +def backup_dir(dir: str, ext: str = ".bak") -> str: + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message: str, options: Iterable[str]) -> str: + for action in os.environ.get("PIP_EXISTS_ACTION", "").split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message: str) -> None: + """Raise an error if no input is allowed.""" + if os.environ.get("PIP_NO_INPUT"): + raise Exception( + f"No input was expected ($PIP_NO_INPUT set); question: {message}" + ) + + +def ask(message: str, options: Iterable[str]) -> str: + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + "Your response ({!r}) was not one of the expected responses: " + "{}".format(response, ", ".join(options)) + ) + else: + return response + + +def ask_input(message: str) -> str: + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message: str) -> str: + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def strtobool(val: str) -> int: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. 
+    """
+    val = val.lower()
+    if val in ("y", "yes", "t", "true", "on", "1"):
+        return 1
+    elif val in ("n", "no", "f", "false", "off", "0"):
+        return 0
+    else:
+        raise ValueError(f"invalid truth value {val!r}")
+
+
+def format_size(bytes: float) -> str:
+    if bytes > 1000 * 1000:
+        return "{:.1f} MB".format(bytes / 1000.0 / 1000)
+    elif bytes > 10 * 1000:
+        return "{} kB".format(int(bytes / 1000))
+    elif bytes > 1000:
+        return "{:.1f} kB".format(bytes / 1000.0)
+    else:
+        return "{} bytes".format(int(bytes))
+
+
+def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
+    """Return a list of formatted rows and a list of column sizes.
+
+    For example::
+
+    >>> tabulate([['foobar', 2000], [0xdeadbeef]])
+    (['foobar     2000', '3735928559'], [10, 4])
+    """
+    rows = [tuple(map(str, row)) for row in rows]
+    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
+    table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
+    return table, sizes
+
+
+def is_installable_dir(path: str) -> bool:
+    """Is path a directory containing pyproject.toml or setup.py?
+
+    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
+    a legacy setuptools layout by identifying setup.py. We don't check for
+    setup.cfg because using it without setup.py is only available for PEP 517
+    projects, which are already covered by the pyproject.toml check.
+    """
+    if not os.path.isdir(path):
+        return False
+    if os.path.isfile(os.path.join(path, "pyproject.toml")):
+        return True
+    if os.path.isfile(os.path.join(path, "setup.py")):
+        return True
+    return False
+
+
+def read_chunks(file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE) -> Iterator[bytes]:
+    """Yield pieces of data from a file-like object until EOF."""
+    while True:
+        chunk = file.read(size)
+        if not chunk:
+            break
+        yield chunk
+
+
+def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
+    """
+    Convert a path to its canonical, case-normalized, absolute version.
+
+    """
+    path = os.path.expanduser(path)
+    if resolve_symlinks:
+        path = os.path.realpath(path)
+    else:
+        path = os.path.abspath(path)
+    return os.path.normcase(path)
+
+
+def splitext(path: str) -> Tuple[str, str]:
+    """Like os.path.splitext, but take off .tar too"""
+    base, ext = posixpath.splitext(path)
+    if base.lower().endswith(".tar"):
+        ext = base[-4:] + ext
+        base = base[:-4]
+    return base, ext
+
+
+def renames(old: str, new: str) -> None:
+    """Like os.renames(), but handles renaming across devices."""
+    # Implementation borrowed from os.renames().
+    head, tail = os.path.split(new)
+    if head and tail and not os.path.exists(head):
+        os.makedirs(head)
+
+    shutil.move(old, new)
+
+    head, tail = os.path.split(old)
+    if head and tail:
+        try:
+            os.removedirs(head)
+        except OSError:
+            pass
+
+
+def is_local(path: str) -> bool:
+    """
+    Return True if this is a path pip is allowed to modify.
+
+    If we're in a virtualenv, sys.prefix points to the virtualenv's
+    prefix; only sys.prefix is considered local.
+
+    If we're not in a virtualenv, in general we can modify anything.
+    However, if the OS vendor has configured distutils to install
+    somewhere other than sys.prefix (which could be a subdirectory of
+    sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
+    and the domain of the OS vendor. (In other words, everything _other
+    than_ sys.prefix is considered local.)
+
+    Caution: this function assumes the head of path has been normalized
+    with normalize_path.
+    """
+
+    path = normalize_path(path)
+    # Hard-coded because PyPy uses a different sys.prefix on Debian
+    prefix = "/usr"
+
+    if running_under_virtualenv():
+        return path.startswith(normalize_path(sys.prefix))
+    else:
+        from pip._internal.locations import get_scheme
+        from pip._internal.models.scheme import SCHEME_KEYS
+        if path.startswith(prefix):
+            scheme = get_scheme("")
+            for key in SCHEME_KEYS:
+                local_path = getattr(scheme, key)
+                if path.startswith(normalize_path(local_path)):
+                    return True
+            return False
+        else:
+            return True
+
+
+def write_output(msg: Any, *args: Any) -> None:
+    logger.info(msg, *args)
+
+
+class StreamWrapper(StringIO):
+    orig_stream: Optional[TextIO] = None
+
+    @classmethod
+    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
+        cls.orig_stream = orig_stream
+        return cls()
+
+    # compileall.compile_dir() needs stdout.encoding to print to stdout
+    # https://github.com/python/mypy/issues/4125
+    @property
+    def encoding(self):  # type: ignore
+        return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name: str) -> Iterator[StreamWrapper]:
+    """Return a context manager used by captured_stdout/stdin/stderr
+    that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+    Taken from Lib/support/__init__.py in the CPython repo.
+    """
+    orig_stdout = getattr(sys, stream_name)
+    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+    try:
+        yield getattr(sys, stream_name)
+    finally:
+        setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout() -> ContextManager[StreamWrapper]:
+    """Capture the output of sys.stdout:
+
+       with captured_stdout() as stdout:
+           print('hello')
+       self.assertEqual(stdout.getvalue(), 'hello\n')
+
+    Taken from Lib/support/__init__.py in the CPython repo.
+    """
+    return captured_output("stdout")
+
+
+def captured_stderr() -> ContextManager[StreamWrapper]:
+    """
+    See captured_stdout().
+    """
+    return captured_output("stderr")
+
+
+# Simulates an enum
+def enum(*sequential: Any, **named: Any) -> Type[Any]:
+    enums = dict(zip(sequential, range(len(sequential))), **named)
+    reverse = {value: key for key, value in enums.items()}
+    enums["reverse_mapping"] = reverse
+    return type("Enum", (), enums)
+
+
+def build_netloc(host: str, port: Optional[int]) -> str:
+    """
+    Build a netloc from a host-port pair
+    """
+    if port is None:
+        return host
+    if ":" in host:
+        # Only wrap host with square brackets when it is IPv6
+        host = f"[{host}]"
+    return f"{host}:{port}"
+
+
+def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
+    """
+    Build a full URL from a netloc.
+    """
+    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
+        # It must be a bare IPv6 address, so wrap it with brackets.
+        netloc = f"[{netloc}]"
+    return f"{scheme}://{netloc}"
+
+
+def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]:
+    """
+    Return the host-port pair from a netloc.
+    """
+    url = build_url_from_netloc(netloc)
+    parsed = urllib.parse.urlparse(url)
+    return parsed.hostname, parsed.port
+
+
+def split_auth_from_netloc(netloc: str) -> NetlocTuple:
+    """
+    Parse out and remove the auth information from a netloc.
+
+    Returns: (netloc, (username, password)).
+    """
+    if "@" not in netloc:
+        return netloc, (None, None)
+
+    # Split from the right because that's how urllib.parse.urlsplit()
+    # behaves if more than one @ is present (which can be checked using
+    # the password attribute of urlsplit()'s return value).
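+    # Everything up to the last "@" is treated as the credentials part.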
+    auth, netloc = netloc.rsplit("@", 1)
+    pw: Optional[str] = None
+    if ":" in auth:
+        # Split from the left because that's how urllib.parse.urlsplit()
+        # behaves if more than one : is present (which again can be checked
+        # using the password attribute of the return value)
+        user, pw = auth.split(":", 1)
+    else:
+        user, pw = auth, None
+
+    user = urllib.parse.unquote(user)
+    if pw is not None:
+        pw = urllib.parse.unquote(pw)
+
+    return netloc, (user, pw)
+
+
+def redact_netloc(netloc: str) -> str:
+    """
+    Replace the sensitive data in a netloc with "****", if it exists.
+
+    For example:
+        - "user:pass@example.com" returns "user:****@example.com"
+        - "accesstoken@example.com" returns "****@example.com"
+    """
+    netloc, (user, password) = split_auth_from_netloc(netloc)
+    if user is None:
+        return netloc
+    if password is None:
+        user = "****"
+        password = ""
+    else:
+        user = urllib.parse.quote(user)
+        password = ":****"
+    return "{user}{password}@{netloc}".format(
+        user=user, password=password, netloc=netloc
+    )
+
+
+def _transform_url(
+    url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
+) -> Tuple[str, NetlocTuple]:
+    """Transform and replace netloc in a url.
+
+    transform_netloc is a function taking the netloc and returning a
+    tuple. The first element of this tuple is the new netloc. The
+    entire tuple is returned.
+
+    Returns a tuple containing the transformed url as item 0 and the
+    original tuple returned by transform_netloc as item 1.
+    """
+    purl = urllib.parse.urlsplit(url)
+    netloc_tuple = transform_netloc(purl.netloc)
+    # stripped url
+    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
+    surl = urllib.parse.urlunsplit(url_pieces)
+    return surl, cast("NetlocTuple", netloc_tuple)
+
+
+def _get_netloc(netloc: str) -> NetlocTuple:
+    return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc: str) -> Tuple[str]:
+    return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]:
+    """
+    Parse a url into separate netloc, auth, and url with no auth.
+
+    Returns: (url_without_auth, netloc, (username, password))
+    """
+    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+    return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url: str) -> str:
+    """Return a copy of url with 'username:password@' removed."""
+    # username/pass params are passed to subversion through flags
+    # and are not recognized in the url.
+    return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url: str) -> str:
+    """Replace the password in a given url with ****."""
+    return _transform_url(url, _redact_netloc)[0]
+
+
+class HiddenText:
+    def __init__(self, secret: str, redacted: str) -> None:
+        self.secret = secret
+        self.redacted = redacted
+
+    def __repr__(self) -> str:
+        return "<HiddenText {!r}>".format(str(self))
+
+    def __str__(self) -> str:
+        return self.redacted
+
+    # This is useful for testing.
+    def __eq__(self, other: Any) -> bool:
+        if type(self) != type(other):
+            return False
+
+        # The string being used for redaction doesn't also have to match,
+        # just the raw, original string.
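+        # Comparing secrets keeps equality independent of how each instance
+        # was redacted.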
+ return self.secret == other.secret + + +def hide_value(value: str) -> HiddenText: + return HiddenText(value, redacted="****") + + +def hide_url(url: str) -> HiddenText: + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) + + +def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]), + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [sys.executable, "-m", "pip"] + sys.argv[1:] + raise CommandError( + "To modify pip, please run the following command:\n{}".format( + " ".join(new_command) + ) + ) + + +def is_console_interactive() -> bool: + """Is this console interactive?""" + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]: + """Return (hash, length) for path using hashlib.sha256()""" + + h = hashlib.sha256() + length = 0 + with open(path, "rb") as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed() -> bool: + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True + + +def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]: + """ + Return paired elements. + + For example: + s -> (s0, s1), (s2, s3), (s4, s5), ... 
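+    An iterable of odd length pairs its final element with None, the default
+    fillvalue of zip_longest().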
+    """
+    iterable = iter(iterable)
+    return zip_longest(iterable, iterable)
+
+
+def partition(
+    pred: Callable[[T], bool],
+    iterable: Iterable[T],
+) -> Tuple[Iterable[T], Iterable[T]]:
+    """
+    Use a predicate to partition entries into false entries and true entries,
+    like
+
+        partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+    """
+    t1, t2 = tee(iterable)
+    return filterfalse(pred, t1), filter(pred, t2)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/models.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 000000000..b6bb21a8b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,39 @@
+"""Utilities for defining models
+"""
+
+import operator
+from typing import Any, Callable, Type
+
+
+class KeyBasedCompareMixin:
+    """Provides comparison capabilities that are based on a key"""
+
+    __slots__ = ["_compare_key", "_defining_class"]
+
+    def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
+        self._compare_key = key
+        self._defining_class = defining_class
+
+    def __hash__(self) -> int:
+        return hash(self._compare_key)
+
+    def __lt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__lt__)
+
+    def __le__(self, other: Any) -> bool:
+        return self._compare(other, operator.__le__)
+
+    def __gt__(self, other: Any) -> bool:
+        return self._compare(other, operator.__gt__)
+
+    def __ge__(self, other: Any) -> bool:
+        return self._compare(other, operator.__ge__)
+
+    def __eq__(self, other: Any) -> bool:
+        return self._compare(other, operator.__eq__)
+
+    def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
+        if not isinstance(other, self._defining_class):
+            return NotImplemented
+
+        return method(self._compare_key, other._compare_key)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/packaging.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 000000000..b9f6af4d1
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,57 @@
+import functools
+import logging
+import re
+from typing import NewType, Optional, Tuple, cast
+
+from pip._vendor.packaging import specifiers, version
+from pip._vendor.packaging.requirements import Requirement
+
+NormalizedExtra = NewType("NormalizedExtra", str)
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(
+    requires_python: Optional[str], version_info: Tuple[int, ...]
+) -> bool:
+    """
+    Check if the given Python version matches a "Requires-Python" specifier.
+
+    :param version_info: A 3-tuple of ints representing a Python
+        major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+    :return: `True` if the given Python version satisfies the requirement.
+        Otherwise, return `False`.
+
+    :raises InvalidSpecifier: If `requires_python` has an invalid format.
+    """
+    if requires_python is None:
+        # The package provides no information
+        return True
+    requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+    python_version = version.parse(".".join(map(str, version_info)))
+    return python_version in requires_python_specifier
+
+
+@functools.lru_cache(maxsize=512)
+def get_requirement(req_string: str) -> Requirement:
+    """Construct a packaging.Requirement object with caching"""
+    # Parsing requirement strings is expensive, and is also expected to happen
+    # with a low diversity of different arguments (at least relative to the number
+    # constructed). 
This method adds a cache to requirement object creation to
+    # minimize repeated parsing of the same string to construct equivalent
+    # Requirement objects.
+    return Requirement(req_string)
+
+
+def safe_extra(extra: str) -> NormalizedExtra:
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+
+    This function is duplicated from ``pkg_resources``. Note that this is not
+    the same as either ``canonicalize_name`` or ``_egg_link_name``.
+    """
+    return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/setuptools_build.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 000000000..f460c4003
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,195 @@
+import sys
+import textwrap
+from typing import List, Optional, Sequence
+
+# Shim to wrap setup.py invocation with setuptools
+# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
+# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
+_SETUPTOOLS_SHIM = textwrap.dedent(
+    """
+    exec(compile('''
+    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
+    #
+    # - It imports setuptools before invoking setup.py, to enable projects that directly
+    #   import from `distutils.core` to work with newer packaging standards.
+    # - It provides a clear error message when setuptools is not installed.
+    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
+    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
+    #     manifest_maker: standard file '-c' not found".
+    # - It generates a shim setup.py, for handling setup.cfg-only projects.
+    import os, sys, tokenize
+
+    try:
+        import setuptools
+    except ImportError as error:
+        print(
+            "ERROR: Can not execute `setup.py` since setuptools is not available in "
+            "the build environment.",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    __file__ = %r
+    sys.argv[0] = __file__
+
+    if os.path.exists(__file__):
+        filename = __file__
+        with tokenize.open(__file__) as f:
+            setup_py_code = f.read()
+    else:
+        filename = "<auto-generated setuptools caller>"
+        setup_py_code = "from setuptools import setup; setup()"
+
+    exec(compile(setup_py_code, filename, "exec"))
+    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
+    """
+).rstrip()
+
+
+def make_setuptools_shim_args(
+    setup_py_path: str,
+    global_options: Optional[Sequence[str]] = None,
+    no_user_config: bool = False,
+    unbuffered_output: bool = False,
+) -> List[str]:
+    """
+    Get setuptools command arguments with shim-wrapped setup file invocation.
+
+    :param setup_py_path: The path to setup.py to be wrapped.
+    :param global_options: Additional global options.
+    :param no_user_config: If True, disables personal user configuration.
+    :param unbuffered_output: If True, adds the unbuffered switch to the
+        argument list.
+    """
+    args = [sys.executable]
+    if unbuffered_output:
+        args += ["-u"]
+    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
+    if global_options:
+        args += global_options
+    if no_user_config:
+        args += ["--no-user-cfg"]
+    return args
+
+
+def make_setuptools_bdist_wheel_args(
+    setup_py_path: str,
+    global_options: Sequence[str],
+    build_options: Sequence[str],
+    destination_dir: str,
+) -> List[str]:
+    # NOTE: Eventually, we'd also want to pass -S to the flags here, when we're
+    # isolating.
Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path: str, + global_options: Sequence[str], +) -> List[str]: + args = make_setuptools_shim_args( + setup_py_path, global_options=global_options, unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path: str, + global_options: Sequence[str], + install_options: Sequence[str], + no_user_config: bool, + prefix: Optional[str], + home: Optional[str], + use_user_site: bool, +) -> List[str]: + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--install-dir", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path: str, + egg_info_dir: Optional[str], + no_user_config: bool, +) -> List[str]: + args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config) + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path: str, + global_options: Sequence[str], + install_options: Sequence[str], + record_filename: str, + root: Optional[str], + prefix: Optional[str], + header_dir: Optional[str], + home: Optional[str], + use_user_site: bool, + no_user_config: bool, + pycompile: bool, +) -> List[str]: + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True, + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/subprocess.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 000000000..b5b762418 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,260 @@ +import logging +import os +import shlex +import subprocess +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + List, + Mapping, + Optional, + Union, +) + +from pip._vendor.rich.markup import escape + +from pip._internal.cli.spinners import SpinnerInterface, open_spinner +from pip._internal.exceptions import InstallationSubprocessError +from pip._internal.utils.logging import VERBOSE, subprocess_logger +from pip._internal.utils.misc import HiddenText + +if TYPE_CHECKING: + # Literal was introduced in Python 3.8. + # + # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7. 
+ from typing import Literal + +CommandArgs = List[Union[str, HiddenText]] + + +def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs: + """ + Create a CommandArgs object. + """ + command_args: CommandArgs = [] + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args: Union[List[str], CommandArgs]) -> str: + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return " ".join( + shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg) + for arg in args + ) + + +def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]: + """ + Return the arguments in their raw, unredacted form. + """ + return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args] + + +def call_subprocess( + cmd: Union[List[str], CommandArgs], + show_stdout: bool = False, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + unset_environ: Optional[Iterable[str]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: Optional[bool] = True, + stdout_only: Optional[bool] = False, + *, + command_desc: str, +) -> str: + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + stdout_only: if true, return only stdout, else return both. When true, + logging of both stdout and stderr occurs when the subprocess has + terminated, else logging occurs as subprocess output is produced. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. 
+ log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using VERBOSE. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.verbose + used_level = VERBOSE + + # Whether the subprocess will be visible in the console. + showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + cwd=cwd, + env=env, + errors="backslashreplace", + ) + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", + exc, + command_desc, + ) + raise + all_output = [] + if not stdout_only: + assert proc.stdout + assert proc.stdin + proc.stdin.close() + # In this mode, stdout and stderr are in the same pipe. + while True: + line: str = proc.stdout.readline() + if not line: + break + line = line.rstrip() + all_output.append(line + "\n") + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + assert spinner + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + output = "".join(all_output) + else: + # In this mode, stdout and stderr are in different pipes. + # We must use communicate() which is the only safe way to read both. + out, err = proc.communicate() + # log line by line to preserve pip log indenting + for out_line in out.splitlines(): + log_subprocess(out_line) + all_output.append(out) + for err_line in err.splitlines(): + log_subprocess(err_line) + all_output.append(err) + output = out + + proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes + if use_spinner: + assert spinner + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == "raise": + error = InstallationSubprocessError( + command_description=command_desc, + exit_code=proc.returncode, + output_lines=all_output if not showing_subprocess else None, + ) + if log_failed_cmd: + subprocess_logger.error("[present-diagnostic] %s", error) + subprocess_logger.verbose( + "[bold magenta]full command[/]: [blue]%s[/]", + escape(format_command_args(cmd)), + extra={"markup": True}, + ) + subprocess_logger.verbose( + "[bold magenta]cwd[/]: %s", + escape(cwd or "[inherit]"), + extra={"markup": True}, + ) + + raise error + elif on_returncode == "warn": + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, + proc.returncode, + cwd, + ) + elif on_returncode == "ignore": + pass + else: + raise ValueError(f"Invalid value: on_returncode={on_returncode!r}") + return output + + +def runner_with_spinner_message(message: str) -> Callable[..., None]: + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. 
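+
+    A sketch of typical use (the message and command are illustrative):
+
+        runner = runner_with_spinner_message("Building wheel for foo")
+        runner([sys.executable, "-c", "pass"], cwd="/tmp")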
+ """ + + def runner( + cmd: List[str], + cwd: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + ) -> None: + with open_spinner(message) as spinner: + call_subprocess( + cmd, + command_desc=message, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 000000000..442679a75 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,246 @@ +import errno +import itertools +import logging +import os.path +import tempfile +from contextlib import ExitStack, contextmanager +from typing import Any, Dict, Iterator, Optional, TypeVar, Union + +from pip._internal.utils.misc import enum, rmtree + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="TempDirectory") + + +# Kinds of temporary directories. Only needed for ones that are +# globally-managed. +tempdir_kinds = enum( + BUILD_ENV="build-env", + EPHEM_WHEEL_CACHE="ephem-wheel-cache", + REQ_BUILD="req-build", +) + + +_tempdir_manager: Optional[ExitStack] = None + + +@contextmanager +def global_tempdir_manager() -> Iterator[None]: + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry: + """Manages temp directory behavior""" + + def __init__(self) -> None: + self._should_delete: Dict[str, bool] = {} + + def set_delete(self, kind: str, value: bool) -> None: + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind: str) -> bool: + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None + + +@contextmanager +def tempdir_registry() -> Iterator[TempDirectoryTypeRegistry]: + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class _Default: + pass + + +_default = _Default() + + +class TempDirectory: + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path: Optional[str] = None, + delete: Union[bool, None, _Default] = _default, + kind: str = "temp", + globally_managed: bool = False, + ): + super().__init__() + + if delete is _default: + if path is not None: + # If we were given an explicit directory, resolve delete option + # now. + delete = False + else: + # Otherwise, we wait until cleanup and see what + # tempdir_registry says. 
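+                # (That is, delete=None defers the decision to __exit__, which
+                # consults tempdir_registry.get_delete(self.kind) and defaults
+                # to True when the kind was never configured.)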
+ delete = None + + # The only time we specify path is in for editables where it + # is the value of the --src option. + if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self) -> str: + assert not self._deleted, f"Attempted to access deleted path: {self._path}" + return self._path + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.path!r}>" + + def __enter__(self: _T) -> _T: + return self + + def __exit__(self, exc: Any, value: Any, tb: Any) -> None: + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind: str) -> str: + """Create a temporary directory and store its path in self.path""" + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + logger.debug("Created temporary directory: %s", path) + return path + + def cleanup(self) -> None: + """Remove the temporary directory created and reset state""" + self._deleted = True + if not os.path.exists(self._path): + return + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. + # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original: str, delete: Optional[bool] = None) -> None: + self.original = original.rstrip("/\\") + super().__init__(delete=delete) + + @classmethod + def _generate_names(cls, name: str) -> Iterator[str]: + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). 
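+
+        For example, given the name "foo" this yields "~oo" first, then
+        two-character prefixes such as "~-o" and "~~o", and eventually longer
+        fallbacks like "~foo".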
+ """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1 + ): + new_name = "~" + "".join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i + ): + new_name = "~" + "".join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind: str) -> str: + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. + path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-")) + + logger.debug("Created temporary directory: %s", path) + return path diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/unpacking.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 000000000..5f63f974b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,258 @@ +"""Utilities related archives. +""" + +import logging +import os +import shutil +import stat +import tarfile +import zipfile +from typing import Iterable, List, Optional +from zipfile import ZipInfo + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug("bz2 module is not available") + +try: + # Only for Python 3.3+ + import lzma # noqa + + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug("lzma module is not available") + + +def current_umask() -> int: + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path: str) -> List[str]: + path = path.lstrip("/").lstrip("\\") + if "/" in path and ( + ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path + ): + return path.split("/", 1) + elif "\\" in path: + return path.split("\\", 1) + else: + return [path, ""] + + +def has_leading_dir(paths: Iterable[str]) -> bool: + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory: str, target: str) -> bool: + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def set_extracted_file_to_default_mode_plus_executable(path: str) -> None: + """ + Make file present at path have execute for user/group/world + (chmod +x) is no-op on windows 
per python docs + """ + os.chmod(path, (0o777 & ~current_umask() | 0o111)) + + +def zip_item_is_executable(info: ZipInfo) -> bool: + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + return bool(mode and stat.S_ISREG(mode) and mode & 0o111) + + +def unzip_file(filename: str, location: str, flatten: bool = True) -> None: + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, "rb") + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + "The zip file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith("/") or fn.endswith("\\"): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + if zip_item_is_executable(info): + set_extracted_file_to_default_mode_plus_executable(fn) + finally: + zipfp.close() + + +def untar_file(filename: str, location: str) -> None: + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
+ """ + ensure_dir(location) + if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"): + mode = "r:gz" + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = "r:bz2" + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = "r:xz" + elif filename.lower().endswith(".tar"): + mode = "r" + else: + logger.warning( + "Cannot determine compression type for file %s", + filename, + ) + mode = "r:*" + tar = tarfile.open(filename, mode, encoding="utf-8") + try: + leading = has_leading_dir([member.name for member in tar.getmembers()]) + for member in tar.getmembers(): + fn = member.name + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + "The tar file ({}) has a file ({}) trying to install " + "outside target directory ({})" + ) + raise InstallationError(message.format(filename, path, location)) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + # https://github.com/python/typeshed/issues/2673 + tar._extract_member(member, path) # type: ignore + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + "In the tar file %s the member %s is invalid: %s", + filename, + member.name, + exc, + ) + continue + ensure_dir(os.path.dirname(path)) + assert fp is not None + with open(path, "wb") as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + set_extracted_file_to_default_mode_plus_executable(path) + finally: + tar.close() + + +def unpack_file( + filename: str, + location: str, + content_type: Optional[str] = None, +) -> None: + filename = os.path.realpath(filename) + if ( + content_type == "application/zip" + or filename.lower().endswith(ZIP_EXTENSIONS) + or zipfile.is_zipfile(filename) + ): + unzip_file(filename, location, flatten=not filename.endswith(".whl")) + elif ( + content_type == "application/x-gzip" + or tarfile.is_tarfile(filename) + or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + "Cannot unpack file %s (downloaded from %s, content-type: %s); " + "cannot detect archive format", + filename, + location, + content_type, + ) + raise InstallationError(f"Cannot determine archive format of {location}") diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/urls.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 000000000..6ba2e04f3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,62 @@ +import os +import string +import urllib.parse +import urllib.request +from typing import Optional + +from .compat import WINDOWS + + +def get_url_scheme(url: str) -> Optional[str]: + if ":" not in url: + return None + return url.split(":", 1)[0].lower() + + +def path_to_url(path: str) -> str: + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. 
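+
+    For example (on a POSIX system), "/tmp/some dir" becomes
+    "file:///tmp/some%20dir".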
+    """
+    path = os.path.normpath(os.path.abspath(path))
+    url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
+    return url
+
+
+def url_to_path(url: str) -> str:
+    """
+    Convert a file: URL to a path.
+    """
+    assert url.startswith(
+        "file:"
+    ), f"You can only turn file: urls into filenames (not {url!r})"
+
+    _, netloc, path, _, _ = urllib.parse.urlsplit(url)
+
+    if not netloc or netloc == "localhost":
+        # According to RFC 8089, same as empty authority.
+        netloc = ""
+    elif WINDOWS:
+        # If we have a UNC path, prepend UNC share notation.
+        netloc = "\\\\" + netloc
+    else:
+        raise ValueError(
+            f"non-local file URIs are not supported on this platform: {url!r}"
+        )
+
+    path = urllib.request.url2pathname(netloc + path)
+
+    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
+    # This creates issues for path-related functions like io.open(), so we try
+    # to detect and strip the leading slash.
+    if (
+        WINDOWS
+        and not netloc  # Not UNC.
+        and len(path) >= 3
+        and path[0] == "/"  # Leading slash to strip.
+        and path[1] in string.ascii_letters  # Drive letter.
+        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
+    ):
+        path = path[1:]
+
+    return path
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/virtualenv.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 000000000..c926db4c3
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,104 @@
+import logging
+import os
+import re
+import site
+import sys
+from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
+    r"include-system-site-packages\s*=\s*(?P<value>(true|false))"
+)
+
+
+def _running_under_venv() -> bool:
+    """Checks if sys.base_prefix and sys.prefix match.
+
+    This handles PEP 405 compliant virtual environments.
+    """
+    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
+
+
+def _running_under_regular_virtualenv() -> bool:
+    """Checks if sys.real_prefix is set.
+
+    This handles virtual environments created with pypa's virtualenv.
+    """
+    # pypa/virtualenv case
+    return hasattr(sys, "real_prefix")
+
+
+def running_under_virtualenv() -> bool:
+    """Return True if we're running inside a virtualenv, False otherwise."""
+    return _running_under_venv() or _running_under_regular_virtualenv()
+
+
+def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
+    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
+
+    Returns None, if it could not read/access the file.
+    """
+    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
+    try:
+        # Although PEP 405 does not specify, the built-in venv module always
+        # writes with UTF-8. (pypa/pip#8717)
+        with open(pyvenv_cfg_file, encoding="utf-8") as f:
+            return f.read().splitlines()  # avoids trailing newlines
+    except OSError:
+        return None
+
+
+def _no_global_under_venv() -> bool:
+    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
+
+    PEP 405 specifies that when system site-packages are not supposed to be
+    visible from a virtual environment, `pyvenv.cfg` must contain the following
+    line:
+
+    include-system-site-packages = false
+
+    Additionally, log a warning if accessing the file fails.
+    """
+    cfg_lines = _get_pyvenv_cfg_lines()
+    if cfg_lines is None:
+        # We're not in a "sane" venv, so assume there is no system
+        # site-packages access (since that's PEP 405's default state).
+ logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group("value") == "false": + return True + return False + + +def _no_global_under_regular_virtualenv() -> bool: + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, + "no-global-site-packages.txt", + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global() -> bool: + """Returns a boolean, whether running in venv with no system site-packages.""" + # PEP 405 compliance needs to be checked first since virtualenv >=20 would + # return True for both checks, but is only able to use the PEP 405 config. + if _running_under_venv(): + return _no_global_under_venv() + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + return False diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/utils/wheel.py b/myenv/lib/python3.10/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 000000000..e5e3f34ed --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,136 @@ +"""Support functions for working with wheel files. +""" + +import logging +from email.message import Message +from email.parser import Parser +from typing import Tuple +from zipfile import BadZipFile, ZipFile + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import UnsupportedWheel + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]: + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e))) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source: ZipFile, name: str) -> str: + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. 
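+
+    For example (illustrative contents): for a wheel whose entries live under
+    "pip-22.0.4.dist-info/", calling this with name "pip" returns
+    "pip-22.0.4.dist-info".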
+ """ + # Zip file path separators must be / + subdirs = {p.split("/", 1)[0] for p in source.namelist()} + + info_dirs = [s for s in subdirs if s.endswith(".dist-info")] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format(", ".join(info_dirs)) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + return info_dir + + +def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes: + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel(f"could not read {path!r} file: {e!r}") + + +def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message: + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = f"{dist_info_dir}/WHEEL" + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = wheel_contents.decode() + except UnicodeDecodeError as e: + raise UnsupportedWheel(f"error decoding {path!r}: {e!r}") + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data: Message) -> Tuple[int, ...]: + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split("."))) + except ValueError: + raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}") + + +def check_compatibility(version: Tuple[int, ...], name: str) -> None: + """Raises errors or warns if called with an incompatible Wheel-Version. + + pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). + + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "{}'s Wheel-Version ({}) is not compatible with this version " + "of pip".format(name, ".".join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + "Installing from a newer Wheel-Version (%s)", + ".".join(map(str, version)), + ) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 000000000..b6beddbe6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,15 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. 
+# (The test directory may still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + RemoteNotValidError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..969460991 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc new file mode 100644 index 000000000..35300eccc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc new file mode 100644 index 000000000..c78c187d9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc new file mode 100644 index 000000000..c903837b2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc new file mode 100644 index 000000000..dddf579e1 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc new file mode 100644 index 000000000..096f9466b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 000000000..a7b16e2e0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,101 @@ +import logging +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = "bzr" + dirname = ".bzr" + repo_name = "branch" + schemes = ( + "bzr+http", + "bzr+https", + "bzr+ssh", + "bzr+sftp", + "bzr+ftp", + "bzr+lp", + 
"bzr+file", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Checking out %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flag = "--quiet" + elif verbosity == 1: + flag = "" + else: + flag = f"-{'v'*verbosity}" + cmd_args = make_command("branch", flag, rev_options.to_args(), url, dest) + self.run_command(cmd_args) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(make_command("switch", url), cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + cmd_args = make_command("pull", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev, user_pass = super().get_url_rev_and_auth(url) + if url.startswith("ssh://"): + url = "bzr+" + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location: str) -> str: + urls = cls.run_command( + ["info"], show_stdout=False, stdout_only=True, cwd=location + ) + for line in urls.splitlines(): + line = line.strip() + for x in ("checkout of branch: ", "parent branch: "): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + raise RemoteNotFoundError + + @classmethod + def get_revision(cls, location: str) -> str: + revision = cls.run_command( + ["revno"], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/git.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 000000000..8d1d49937 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,526 @@ +import logging +import os.path +import pathlib +import re +import urllib.parse +import urllib.request +from typing import List, Optional, Tuple + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RemoteNotValidError, + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +GIT_VERSION_REGEX = re.compile( + r"^git version " # Prefix. + r"(\d+)" # Major. + r"\.(\d+)" # Dot, minor. + r"(?:\.(\d+))?" # Optional dot, patch. + r".*$" # Suffix, including any pre- and post-release segments we don't care about. +) + +HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$") + +# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' +SCP_REGEX = re.compile( + r"""^ + # Optional user, e.g. 'git@' + (\w+@)? + # Server, e.g. 'github.com'. + ([^/:]+): + # The server-side path. e.g. 'user/project.git'. 
Must start with an + # alphanumeric character so as not to be confusable with a Windows paths + # like 'C:/foo/bar' or 'C:\foo\bar'. + (\w[^:]*) + $""", + re.VERBOSE, +) + + +def looks_like_hash(sha: str) -> bool: + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = "git" + dirname = ".git" + repo_name = "clone" + schemes = ( + "git+http", + "git+https", + "git+ssh", + "git+git", + "git+file", + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ("GIT_DIR", "GIT_WORK_TREE") + default_arg_rev = "HEAD" + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [rev] + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0]) + return not is_tag_or_branch + + def get_git_version(self) -> Tuple[int, ...]: + version = self.run_command( + ["version"], + command_desc="git version", + show_stdout=False, + stdout_only=True, + ) + match = GIT_VERSION_REGEX.match(version) + if not match: + logger.warning("Can't parse git version: %s", version) + return () + return tuple(int(c) for c in match.groups()) + + @classmethod + def get_current_branch(cls, location: str) -> Optional[str]: + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. + args = ["symbolic-ref", "-q", "HEAD"] + output = cls.run_command( + args, + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + ref = output.strip() + + if ref.startswith("refs/heads/"): + return ref[len("refs/heads/") :] + + return None + + @classmethod + def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]: + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. + output = cls.run_command( + ["show-ref", rev], + cwd=dest, + show_stdout=False, + stdout_only=True, + on_returncode="ignore", + ) + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + ref_sha, ref_name = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. 
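+                # A well-formed line has the form "<sha> <ref>", e.g.
+                # "0123456789abcdef0123456789abcdef01234567 refs/tags/v1.0".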
+ raise ValueError(f"unexpected show-ref line: {line!r}") + + refs[ref_name] = ref_sha + + branch_ref = f"refs/remotes/origin/{rev}" + tag_ref = f"refs/tags/{rev}" + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def _should_fetch(cls, dest: str, rev: str) -> bool: + """ + Return true if rev is a ref or is a commit that we don't have locally. + + Branches and tags are not considered in this method because they are + assumed to be always available locally (which is a normal outcome of + ``git clone`` and ``git fetch --tags``). + """ + if rev.startswith("refs/"): + # Always fetch remote refs. + return True + + if not looks_like_hash(rev): + # Git fetch would fail with abbreviated commits. + return False + + if cls.has_commit(dest, rev): + # Don't fetch if we have the commit locally. + return False + + return True + + @classmethod + def resolve_revision( + cls, dest: str, url: HiddenText, rev_options: RevOptions + ) -> RevOptions: + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options.branch_name = rev if is_branch else None + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not cls._should_fetch(dest, rev): + return rev_options + + # fetch the requested revision + cls.run_command( + make_command("fetch", "-q", url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev="FETCH_HEAD") + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. + return False + + return cls.get_revision(dest) == name + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest)) + if verbosity <= 0: + flags: Tuple[str, ...] = ("--quiet",) + elif verbosity == 1: + flags = () + else: + flags = ("--verbose", "--progress") + if self.get_git_version() >= (2, 17): + # Git added support for partial clone in 2.17 + # https://git-scm.com/docs/partial-clone + # Speeds up cloning by functioning without a complete copy of repository + self.run_command( + make_command( + "clone", + "--filter=blob:none", + *flags, + url, + dest, + ) + ) + else: + self.run_command(make_command("clone", *flags, url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. 
+ rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, "branch_name", None) + logger.debug("Rev options %s, branch_name %s", rev_options, branch_name) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + "checkout", + "-q", + rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. + track_branch = f"origin/{branch_name}" + cmd_args = [ + "checkout", + "-b", + branch_name, + "--track", + track_branch, + ] + self.run_command(cmd_args, cwd=dest) + else: + sha = self.get_revision(dest) + rev_options = rev_options.make_new(sha) + + logger.info("Resolved %s to commit %s", url, rev_options.rev) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command( + make_command("config", "remote.origin.url", url), + cwd=dest, + ) + cmd_args = make_command("checkout", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + # First fetch changes from the default remote + if self.get_git_version() >= (1, 9): + # fetch tags in addition to everything else + self.run_command(["fetch", "-q", "--tags"], cwd=dest) + else: + self.run_command(["fetch", "-q"], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = cls.run_command( + ["config", "--get-regexp", r"remote\..*\.url"], + extra_ok_returncodes=(1,), + show_stdout=False, + stdout_only=True, + cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith("remote.origin.url "): + found_remote = remote + break + url = found_remote.split(" ")[1] + return cls._git_remote_to_pip_url(url.strip()) + + @staticmethod + def _git_remote_to_pip_url(url: str) -> str: + """ + Convert a remote url from what git uses to what pip accepts. + + There are 3 legal forms **url** may take: + + 1. A fully qualified url: ssh://git@example.com/foo/bar.git + 2. A local project.git folder: /path/to/bare/repository.git + 3. SCP shorthand for form 1: git@example.com:foo/bar.git + + Form 1 is output as-is. Form 2 must be converted to URI and form 3 must + be converted to form 1. + + See the corresponding test test_git_remote_url_to_pip() for examples of + sample inputs/outputs. + """ + if re.match(r"\w+://", url): + # This is already valid. Pass it though as-is. + return url + if os.path.exists(url): + # A local bare remote (git clone --mirror). + # Needs a file:// prefix. 
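+            # For illustration, a bare repo at "/srv/git/project.git"
+            # (hypothetical path) becomes "file:///srv/git/project.git".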
+ return pathlib.PurePath(url).as_uri() + scp_match = SCP_REGEX.match(url) + if scp_match: + # Add an ssh:// prefix and replace the ':' with a '/'. + return scp_match.expand(r"ssh://\1\2/\3") + # Otherwise, bail out. + raise RemoteNotValidError(url) + + @classmethod + def has_commit(cls, location: str, rev: str) -> bool: + """ + Check if rev is a commit that is available in the local repository. + """ + try: + cls.run_command( + ["rev-parse", "-q", "--verify", "sha^" + rev], + cwd=location, + log_failed_cmd=False, + ) + except InstallationError: + return False + else: + return True + + @classmethod + def get_revision(cls, location: str, rev: Optional[str] = None) -> str: + if rev is None: + rev = "HEAD" + current_rev = cls.run_command( + ["rev-parse", rev], + show_stdout=False, + stdout_only=True, + cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + # find the repo root + git_dir = cls.run_command( + ["rev-parse", "--git-dir"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, "..")) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. 
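+
+        For example (illustrative): 'git+git@github.com:org/repo.git' is
+        temporarily rewritten to 'git+ssh://git@github.com:org/repo.git' so
+        that it can be parsed, after which the 'ssh://' marker is stripped
+        from the result again.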
+ """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith("file"): + initial_slashes = path[: -len(path.lstrip("/"))] + newpath = initial_slashes + urllib.request.url2pathname(path).replace( + "\\", "/" + ).lstrip("/") + after_plus = scheme.find("+") + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if "://" not in url: + assert "file:" not in url + url = url.replace("git+", "git+ssh://") + url, rev, user_pass = super().get_url_rev_and_auth(url) + url = url.replace("ssh://", "") + else: + url, rev, user_pass = super().get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location: str) -> None: + if not os.path.exists(os.path.join(location, ".gitmodules")): + return + cls.run_command( + ["submodule", "update", "--init", "--recursive", "-q"], + cwd=location, + ) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["rev-parse", "--show-toplevel"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under git control " + "because git is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + @staticmethod + def should_add_vcs_url_prefix(repo_url: str) -> bool: + """In either https or ssh form, requirements must be prefixed with git+.""" + return True + + +vcs.register(Git) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 000000000..2a005e0af --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,163 @@ +import configparser +import logging +import os +from typing import List, Optional, Tuple + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import HiddenText, display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + RevOptions, + VersionControl, + find_path_to_project_root_from_repo_root, + vcs, +) + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = "hg" + dirname = ".hg" + repo_name = "clone" + schemes = ( + "hg+file", + "hg+http", + "hg+https", + "hg+ssh", + "hg+static-http", + ) + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return [rev] + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + rev_display = rev_options.to_display() + logger.info( + "Cloning hg %s%s to %s", + url, + rev_display, + display_path(dest), + ) + if verbosity <= 0: + flags: Tuple[str, ...] 
= ("--quiet",) + elif verbosity == 1: + flags = () + elif verbosity == 2: + flags = ("--verbose",) + else: + flags = ("--verbose", "--debug") + self.run_command(make_command("clone", "--noupdate", *flags, url, dest)) + self.run_command( + make_command("update", *flags, rev_options.to_args()), + cwd=dest, + ) + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + repo_config = os.path.join(dest, self.dirname, "hgrc") + config = configparser.RawConfigParser() + try: + config.read(repo_config) + config.set("paths", "default", url.secret) + with open(repo_config, "w") as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning("Could not switch Mercurial repository to %s: %s", url, exc) + else: + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + self.run_command(["pull", "-q"], cwd=dest) + cmd_args = make_command("update", "-q", rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location: str) -> str: + url = cls.run_command( + ["showconfig", "paths.default"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the repository-local changeset revision number, as an integer. + """ + current_revision = cls.run_command( + ["parents", "--template={rev}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location: str) -> str: + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ["parents", "--template={node}"], + show_stdout=False, + stdout_only=True, + cwd=location, + ).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. 
+ """ + # find the repo root + repo_root = cls.run_command( + ["root"], show_stdout=False, stdout_only=True, cwd=location + ).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_project_root_from_repo_root(location, repo_root) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + loc = super().get_repository_root(location) + if loc: + return loc + try: + r = cls.run_command( + ["root"], + cwd=location, + show_stdout=False, + stdout_only=True, + on_returncode="raise", + log_failed_cmd=False, + ) + except BadCommand: + logger.debug( + "could not determine if %s is under hg control " + "because hg is not available", + location, + ) + return None + except InstallationError: + return None + return os.path.normpath(r.rstrip("\r\n")) + + +vcs.register(Mercurial) diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 000000000..89c8754ce --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,324 @@ +import logging +import os +import re +from typing import List, Optional, Tuple + +from pip._internal.utils.misc import ( + HiddenText, + display_path, + is_console_interactive, + is_installable_dir, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import CommandArgs, make_command +from pip._internal.vcs.versioncontrol import ( + AuthInfo, + RemoteNotFoundError, + RevOptions, + VersionControl, + vcs, +) + +logger = logging.getLogger(__name__) + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r"(.*)") + + +class Subversion(VersionControl): + name = "svn" + dirname = ".svn" + repo_name = "checkout" + schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file") + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + return True + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + return ["-r", rev] + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, _ in os.walk(location): + if cls.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, "entries") + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = cls._get_svn_url_rev(base) + + if base == location: + assert dirurl is not None + base = dirurl + "/" # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return str(revision) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + This override allows the auth information to be passed to svn via the + --username and --password options instead of via the URL. + """ + if scheme == "ssh": + # The --username and --password options can't be used for + # svn+ssh URLs, so keep the auth information in the URL. 
+            return super().get_netloc_and_auth(netloc, scheme)
+
+        return split_auth_from_netloc(netloc)
+
+    @classmethod
+    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
+        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
+        url, rev, user_pass = super().get_url_rev_and_auth(url)
+        if url.startswith("ssh://"):
+            url = "svn+" + url
+        return url, rev, user_pass
+
+    @staticmethod
+    def make_rev_args(
+        username: Optional[str], password: Optional[HiddenText]
+    ) -> CommandArgs:
+        extra_args: CommandArgs = []
+        if username:
+            extra_args += ["--username", username]
+        if password:
+            extra_args += ["--password", password]
+
+        return extra_args
+
+    @classmethod
+    def get_remote_url(cls, location: str) -> str:
+        # In cases where the source is in a subdirectory, we have to look up in
+        # the location until we find a valid project root.
+        orig_location = location
+        while not is_installable_dir(location):
+            last_location = location
+            location = os.path.dirname(location)
+            if location == last_location:
+                # We've traversed up to the root of the filesystem without
+                # finding a Python project.
+                logger.warning(
+                    "Could not find Python project for directory %s (tried all "
+                    "parent directories)",
+                    orig_location,
+                )
+                raise RemoteNotFoundError
+
+        url, _rev = cls._get_svn_url_rev(location)
+        if url is None:
+            raise RemoteNotFoundError
+
+        return url
+
+    @classmethod
+    def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
+        from pip._internal.exceptions import InstallationError
+
+        entries_path = os.path.join(location, cls.dirname, "entries")
+        if os.path.exists(entries_path):
+            with open(entries_path) as f:
+                data = f.read()
+        else:  # subversion >= 1.7 does not have the 'entries' file
+            data = ""
+
+        url = None
+        if data.startswith("8") or data.startswith("9") or data.startswith("10"):
+            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
+            del entries[0][0]  # get rid of the '8'
+            url = entries[0][3]
+            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
+        elif data.startswith("<?xml"):
+            match = _svn_xml_url_re.search(data)
+            if not match:
+                raise ValueError(f"Badly formatted data: {data!r}")
+            url = match.group(1)  # get repository URL
+            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+        else:
+            try:
+                # subversion >= 1.7
+                # Note that using get_remote_call_options is not necessary here
+                # because `svn info` is being run against a local directory.
+                # We don't need to worry about making sure interactive mode
+                # is being used to prompt for passwords, because passwords
+                # are only potentially needed for remote server requests.
+                xml = cls.run_command(
+                    ["info", "--xml", location],
+                    show_stdout=False,
+                    stdout_only=True,
+                )
+                match = _svn_info_xml_url_re.search(xml)
+                assert match is not None
+                url = match.group(1)
+                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+            except InstallationError:
+                url, revs = None, []
+
+        if revs:
+            rev = max(revs)
+        else:
+            rev = 0
+
+        return url, rev
+
+    @classmethod
+    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
+        """Always assume the versions don't match"""
+        return False
+
+    def __init__(self, use_interactive: bool = None) -> None:
+        if use_interactive is None:
+            use_interactive = is_console_interactive()
+        self.use_interactive = use_interactive
+
+        # This member is used to cache the fetched version of the current
+        # ``svn`` client.
+        # Special value definitions:
+        #   None: Not evaluated yet.
+        #   Empty tuple: Could not parse version.
+        self._vcs_version: Optional[Tuple[int, ...]] = None
+
+        super().__init__()
+
+    def call_vcs_version(self) -> Tuple[int, ...]:
+        """Query the version of the currently installed Subversion client.
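+
+        For example, using one of the sample outputs listed below, a first
+        line of "svn, version 1.10.3 (r1842928)" is parsed into (1, 10, 3).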
+ + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0) + # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2 + version_prefix = "svn, version " + version = self.run_command(["--version"], show_stdout=False, stdout_only=True) + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix) :].split()[0] + version_list = version.partition("-")[0].split(".") + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self) -> Tuple[int, ...]: + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self) -> CommandArgs: + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ["--non-interactive"] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). 
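+        # To summarize the resulting behaviour (a sketch, not exhaustive):
+        #   use_interactive False             -> ["--non-interactive"] (above)
+        #   use_interactive True, svn >= 1.8  -> ["--force-interactive"]
+        #   use_interactive True, svn < 1.8 or unparsed version () -> []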
+        if svn_version >= (1, 8):
+            return ["--force-interactive"]
+
+        return []
+
+    def fetch_new(
+        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
+    ) -> None:
+        rev_display = rev_options.to_display()
+        logger.info(
+            "Checking out %s%s to %s",
+            url,
+            rev_display,
+            display_path(dest),
+        )
+        if verbosity <= 0:
+            flag = "--quiet"
+        else:
+            flag = ""
+        cmd_args = make_command(
+            "checkout",
+            flag,
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "switch",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            url,
+            dest,
+        )
+        self.run_command(cmd_args)
+
+    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
+        cmd_args = make_command(
+            "update",
+            self.get_remote_call_options(),
+            rev_options.to_args(),
+            dest,
+        )
+        self.run_command(cmd_args)
+
+
+vcs.register(Subversion)
diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 000000000..02bbf68e7
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,705 @@
+"""Handles all VCS (version control) support"""
+
+import logging
+import os
+import shutil
+import sys
+import urllib.parse
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
+
+from pip._internal.cli.spinners import SpinnerInterface
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import (
+    HiddenText,
+    ask_path_exists,
+    backup_dir,
+    display_path,
+    hide_url,
+    hide_value,
+    is_installable_dir,
+    rmtree,
+)
+from pip._internal.utils.subprocess import (
+    CommandArgs,
+    call_subprocess,
+    format_command_args,
+    make_command,
+)
+from pip._internal.utils.urls import get_url_scheme
+
+if TYPE_CHECKING:
+    # Literal was introduced in Python 3.8.
+    #
+    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
+    from typing import Literal
+
+
+__all__ = ["vcs"]
+
+
+logger = logging.getLogger(__name__)
+
+AuthInfo = Tuple[Optional[str], Optional[str]]
+
+
+def is_url(name: str) -> bool:
+    """
+    Return true if the name looks like a URL.
+    """
+    scheme = get_url_scheme(name)
+    if scheme is None:
+        return False
+    return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes
+
+
+def make_vcs_requirement_url(
+    repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
+) -> str:
+    """
+    Return the URL for a VCS requirement.
+
+    Args:
+      repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
+      project_name: the (unescaped) project name.
+    """
+    egg_project_name = project_name.replace("-", "_")
+    req = f"{repo_url}@{rev}#egg={egg_project_name}"
+    if subdir:
+        req += f"&subdirectory={subdir}"
+
+    return req
+
+
+def find_path_to_project_root_from_repo_root(
+    location: str, repo_root: str
+) -> Optional[str]:
+    """
+    Find the Python project's root by searching up the filesystem from
+    `location`. Return the path to project root relative to `repo_root`.
+    Return None if the project root is `repo_root`, or cannot be found.
+    """
+    # find project root.
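+    # A worked example with hypothetical paths: given location="/repo/src/pkg"
+    # and repo_root="/repo", the loop below climbs /repo/src/pkg -> /repo/src
+    # -> /repo until is_installable_dir() finds a project; if it succeeds at
+    # /repo/src the function returns "src", and if only the repo root itself
+    # is installable it returns None.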
+    orig_location = location
+    while not is_installable_dir(location):
+        last_location = location
+        location = os.path.dirname(location)
+        if location == last_location:
+            # We've traversed up to the root of the filesystem without
+            # finding a Python project.
+            logger.warning(
+                "Could not find a Python project for directory %s (tried all "
+                "parent directories)",
+                orig_location,
+            )
+            return None
+
+    if os.path.samefile(repo_root, location):
+        return None
+
+    return os.path.relpath(location, repo_root)
+
+
+class RemoteNotFoundError(Exception):
+    pass
+
+
+class RemoteNotValidError(Exception):
+    def __init__(self, url: str):
+        super().__init__(url)
+        self.url = url
+
+
+class RevOptions:
+
+    """
+    Encapsulates a VCS-specific revision to install, along with any VCS
+    install options.
+
+    Instances of this class should be treated as if immutable.
+    """
+
+    def __init__(
+        self,
+        vc_class: Type["VersionControl"],
+        rev: Optional[str] = None,
+        extra_args: Optional[CommandArgs] = None,
+    ) -> None:
+        """
+        Args:
+          vc_class: a VersionControl subclass.
+          rev: the name of the revision to install.
+          extra_args: a list of extra options.
+        """
+        if extra_args is None:
+            extra_args = []
+
+        self.extra_args = extra_args
+        self.rev = rev
+        self.vc_class = vc_class
+        self.branch_name: Optional[str] = None
+
+    def __repr__(self) -> str:
+        return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"
+
+    @property
+    def arg_rev(self) -> Optional[str]:
+        if self.rev is None:
+            return self.vc_class.default_arg_rev
+
+        return self.rev
+
+    def to_args(self) -> CommandArgs:
+        """
+        Return the VCS-specific command arguments.
+        """
+        args: CommandArgs = []
+        rev = self.arg_rev
+        if rev is not None:
+            args += self.vc_class.get_base_rev_args(rev)
+        args += self.extra_args
+
+        return args
+
+    def to_display(self) -> str:
+        if not self.rev:
+            return ""
+
+        return f" (to revision {self.rev})"
+
+    def make_new(self, rev: str) -> "RevOptions":
+        """
+        Make a copy of the current instance, but with a new rev.
+
+        Args:
+          rev: the name of the revision for the new object.
+        """
+        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
+class VcsSupport:
+    _registry: Dict[str, "VersionControl"] = {}
+    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
+
+    def __init__(self) -> None:
+        # Register more schemes with urlparse for various version control
+        # systems
+        urllib.parse.uses_netloc.extend(self.schemes)
+        super().__init__()
+
+    def __iter__(self) -> Iterator[str]:
+        return self._registry.__iter__()
+
+    @property
+    def backends(self) -> List["VersionControl"]:
+        return list(self._registry.values())
+
+    @property
+    def dirnames(self) -> List[str]:
+        return [backend.dirname for backend in self.backends]
+
+    @property
+    def all_schemes(self) -> List[str]:
+        schemes: List[str] = []
+        for backend in self.backends:
+            schemes.extend(backend.schemes)
+        return schemes
+
+    def register(self, cls: Type["VersionControl"]) -> None:
+        if not hasattr(cls, "name"):
+            logger.warning("Cannot register VCS %s", cls.__name__)
+            return
+        if cls.name not in self._registry:
+            self._registry[cls.name] = cls()
+            logger.debug("Registered VCS backend: %s", cls.name)
+
+    def unregister(self, name: str) -> None:
+        if name in self._registry:
+            del self._registry[name]
+
+    def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
+        """
+        Return a VersionControl object if a repository of that type is found
+        at the given directory.
+ """ + vcs_backends = {} + for vcs_backend in self._registry.values(): + repo_path = vcs_backend.get_repository_root(location) + if not repo_path: + continue + logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name) + vcs_backends[repo_path] = vcs_backend + + if not vcs_backends: + return None + + # Choose the VCS in the inner-most directory. Since all repository + # roots found here would be either `location` or one of its + # parents, the longest path should have the most path components, + # i.e. the backend representing the inner-most repository. + inner_most_repo_path = max(vcs_backends, key=len) + return vcs_backends[inner_most_repo_path] + + def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name: str) -> Optional["VersionControl"]: + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl: + name = "" + dirname = "" + repo_name = "" + # List of supported schemes for this Version Control + schemes: Tuple[str, ...] = () + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ: Tuple[str, ...] = () + default_arg_rev: Optional[str] = None + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url: str) -> bool: + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith(f"{cls.name}:") + + @classmethod + def get_subdirectory(cls, location: str) -> Optional[str]: + """ + Return the path to Python project root, relative to the repo root. + Return None if the project root is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir: str) -> str: + """ + Return the revision string that should be used in a requirement. + """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir: str, project_name: str) -> str: + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = f"{cls.name}+{repo_url}" + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev: str) -> List[str]: + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url: str, dest: str) -> bool: + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. 
+ """ + return False + + @classmethod + def make_rev_options( + cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None + ) -> RevOptions: + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. + """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo: str) -> bool: + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + @classmethod + def get_netloc_and_auth( + cls, netloc: str, scheme: str + ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]: + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + if "+" not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is +://, " + "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. + scheme = scheme.split("+", 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if "@" in path: + path, rev = path.rsplit("@", 1) + if not rev: + raise InstallationError( + "The URL {!r} has an empty revision (after @) " + "which is not supported. Include a revision after @ " + "or remove @ from the URL.".format(url) + ) + url = urllib.parse.urlunsplit((scheme, netloc, path, query, "")) + return url, rev, user_pass + + @staticmethod + def make_rev_args( + username: Optional[str], password: Optional[HiddenText] + ) -> CommandArgs: + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]: + """ + Return the URL and RevOptions object to use in obtain(), + as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password: Optional[HiddenText] = None + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url: str) -> str: + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib.parse.unquote(url).rstrip("/") + + @classmethod + def compare_urls(cls, url1: str, url2: str) -> bool: + """ + Compare two repo URLs for identity, ignoring incidental differences. 
+ """ + return cls.normalize_url(url1) == cls.normalize_url(url2) + + def fetch_new( + self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int + ) -> None: + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. + verbosity: verbosity level. + """ + raise NotImplementedError + + def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None: + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. + """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + "%s in %s exists, and has correct URL (%s)", + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + "Updating %s %s%s", + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info("Skipping because already up-to-date.") + return + + logger.warning( + "%s %s in %s exists with URL %s", + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b")) + else: + logger.warning( + "Directory %s already exists, and is not a %s %s.", + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore + + logger.warning( + "The plan is to install the %s repository %s", + self.name, + url, + ) + response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1]) + + if response == "a": + sys.exit(-1) + + if response == "w": + logger.warning("Deleting %s", display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + if response == "b": + dest_dir = backup_dir(dest) + logger.warning("Backing up %s to %s", display_path(dest), dest_dir) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options, verbosity=verbosity) + return + + # Do nothing if the response is "i". 
+ if response == "s": + logger.info( + "Switching %s %s to %s%s", + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location: str, url: HiddenText, verbosity: int) -> None: + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + :param verbosity: verbosity level. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url, verbosity=verbosity) + + @classmethod + def get_remote_url(cls, location: str) -> str: + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location: str) -> str: + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd: Union[List[str], CommandArgs], + show_stdout: bool = True, + cwd: Optional[str] = None, + on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise", + extra_ok_returncodes: Optional[Iterable[int]] = None, + command_desc: Optional[str] = None, + extra_environ: Optional[Mapping[str, Any]] = None, + spinner: Optional[SpinnerInterface] = None, + log_failed_cmd: bool = True, + stdout_only: bool = False, + ) -> str: + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + if command_desc is None: + command_desc = format_command_args(cmd) + try: + return call_subprocess( + cmd, + show_stdout, + cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd, + stdout_only=stdout_only, + ) + except FileNotFoundError: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + raise BadCommand( + f"Cannot find command {cls.name!r} - do you have " + f"{cls.name!r} installed and in your PATH?" + ) + except PermissionError: + # errno.EACCES = Permission denied + # This error occurs, for instance, when the command is installed + # only for another user. So, the current user don't have + # permission to call the other user command. + raise BadCommand( + f"No permission to execute {cls.name!r} - install it " + f"locally, globally (ask admin), or check your PATH. " + f"See possible solutions at " + f"https://pip.pypa.io/en/latest/reference/pip_freeze/" + f"#fixing-permission-denied." + ) + + @classmethod + def is_repository_directory(cls, path: str) -> bool: + """ + Return whether a directory path is a repository directory. + """ + logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def get_repository_root(cls, location: str) -> Optional[str]: + """ + Return the "root" (top-level) directory controlled by the vcs, + or `None` if the directory is not in any. + + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For + example, the Git override checks that Git is actually available. 
+ """ + if cls.is_repository_directory(location): + return location + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_internal/wheel_builder.py b/myenv/lib/python3.10/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 000000000..d0663443b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,377 @@ +"""Orchestrator for building wheels from InstallRequirements. +""" + +import logging +import os.path +import re +import shutil +from typing import Any, Callable, Iterable, List, Optional, Tuple + +from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version +from pip._vendor.packaging.version import InvalidVersion, Version + +from pip._internal.cache import WheelCache +from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel +from pip._internal.metadata import FilesystemWheel, get_wheel_distribution +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_editable import build_wheel_editable +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +logger = logging.getLogger(__name__) + +_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) + +BinaryAllowedPredicate = Callable[[InstallRequirement], bool] +BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + + +def _contains_egg_info(s: str) -> bool: + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req: InstallRequirement, + need_wheel: bool, + check_binary_allowed: BinaryAllowedPredicate, +) -> bool: + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + "Skipping %s, due to already being wheel.", + req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). 
+
+    if not req.source_dir:
+        return False
+
+    if req.editable:
+        # we only build PEP 660 editable requirements
+        return req.supports_pyproject_editable()
+
+    if req.use_pep517:
+        return True
+
+    if not check_binary_allowed(req):
+        logger.info(
+            "Skipping wheel build for %s, due to binaries being disabled for it.",
+            req.name,
+        )
+        return False
+
+    if not is_wheel_installed():
+        # we don't build legacy requirements if wheel is not installed
+        logger.info(
+            "Using legacy 'setup.py install' for %s, "
+            "since package 'wheel' is not installed.",
+            req.name,
+        )
+        return False
+
+    return True
+
+
+def should_build_for_wheel_command(
+    req: InstallRequirement,
+) -> bool:
+    return _should_build(req, need_wheel=True, check_binary_allowed=_always_true)
+
+
+def should_build_for_install_command(
+    req: InstallRequirement,
+    check_binary_allowed: BinaryAllowedPredicate,
+) -> bool:
+    return _should_build(
+        req, need_wheel=False, check_binary_allowed=check_binary_allowed
+    )
+
+
+def _should_cache(
+    req: InstallRequirement,
+) -> Optional[bool]:
+    """
+    Return whether a built InstallRequirement can be stored in the persistent
+    wheel cache, assuming the wheel cache is available, and _should_build()
+    has determined a wheel needs to be built.
+    """
+    if req.editable or not req.source_dir:
+        # never cache editable requirements
+        return False
+
+    if req.link and req.link.is_vcs:
+        # VCS checkout. Do not cache
+        # unless it points to an immutable commit hash.
+        assert not req.editable
+        assert req.source_dir
+        vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
+        assert vcs_backend
+        if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
+            return True
+        return False
+
+    assert req.link
+    base, ext = req.link.splitext()
+    if _contains_egg_info(base):
+        return True
+
+    # Otherwise, do not cache.
+    return False
+
+
+def _get_cache_dir(
+    req: InstallRequirement,
+    wheel_cache: WheelCache,
+) -> str:
+    """Return the persistent or temporary cache directory where the built
+    wheel needs to be stored.
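+
+    (As the body below shows, the persistent cache is used only when
+    _should_cache() returns True, e.g. a VCS link pinned to an immutable
+    commit or a link whose filename carries name-version egg info; all other
+    builds go to an ephemeral, per-link temporary directory.)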
+ """ + cache_available = bool(wheel_cache.cache_dir) + assert req.link + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_: Any) -> bool: + return True + + +def _verify_one(req: InstallRequirement, wheel_path: str) -> None: + canonical_name = canonicalize_name(req.name or "") + w = Wheel(os.path.basename(wheel_path)) + if canonicalize_name(w.name) != canonical_name: + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(canonical_name, w.name), + ) + dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name) + dist_verstr = str(dist.version) + if canonicalize_version(dist_verstr) != canonicalize_version(w.version): + raise InvalidWheelFilename( + "Wheel has unexpected file name: expected {!r}, " + "got {!r}".format(dist_verstr, w.version), + ) + metadata_version_value = dist.metadata_version + if metadata_version_value is None: + raise UnsupportedWheel("Missing Metadata-Version") + try: + metadata_version = Version(metadata_version_value) + except InvalidVersion: + msg = f"Invalid Metadata-Version: {metadata_version_value}" + raise UnsupportedWheel(msg) + if metadata_version >= Version("1.2") and not isinstance(dist.version, Version): + raise UnsupportedWheel( + "Metadata 1.2 mandates PEP 440 version, " + "but {!r} is not".format(dist_verstr) + ) + + +def _build_one( + req: InstallRequirement, + output_dir: str, + verify: bool, + build_options: List[str], + global_options: List[str], + editable: bool, +) -> Optional[str]: + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. + """ + artifact = "editable" if editable else "wheel" + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building %s for %s failed: %s", + artifact, + req.name, + e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + wheel_path = _build_one_inside_env( + req, output_dir, build_options, global_options, editable + ) + if wheel_path and verify: + try: + _verify_one(req, wheel_path) + except (InvalidWheelFilename, UnsupportedWheel) as e: + logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e) + return None + return wheel_path + + +def _build_one_inside_env( + req: InstallRequirement, + output_dir: str, + build_options: List[str], + global_options: List[str], + editable: bool, +) -> Optional[str]: + with TempDirectory(kind="wheel") as temp_dir: + assert req.name + if req.use_pep517: + assert req.metadata_directory + assert req.pep517_backend + if global_options: + logger.warning( + "Ignoring --global-option when building %s using PEP 517", req.name + ) + if build_options: + logger.warning( + "Ignoring --build-option when building %s using PEP 517", req.name + ) + if editable: + wheel_path = build_wheel_editable( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = 
os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info( + "Created wheel for %s: filename=%s size=%d sha256=%s", + req.name, + wheel_name, + length, + wheel_hash.hexdigest(), + ) + logger.info("Stored in directory: %s", output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, + e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool: + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info("Running setup.py clean for %s", req.name) + try: + call_subprocess( + clean_args, command_desc="python setup.py clean", cwd=req.source_dir + ) + return True + except Exception: + logger.error("Failed cleaning build dir for %s", req.name) + return False + + +def build( + requirements: Iterable[InstallRequirement], + wheel_cache: WheelCache, + verify: bool, + build_options: List[str], + global_options: List[str], +) -> BuildResult: + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + "Building wheels for collected packages: %s", + ", ".join(req.name for req in requirements), # type: ignore + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + assert req.name + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, + cache_dir, + verify, + build_options, + global_options, + req.editable and req.permit_editable_wheels, + ) + if wheel_file: + # Update the link for this. + req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + "Successfully built %s", + " ".join([req.name for req in build_successes]), # type: ignore + ) + if build_failures: + logger.info( + "Failed to build %s", + " ".join([req.name for req in build_failures]), # type: ignore + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/__init__.py new file mode 100644 index 000000000..3843cb099 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,111 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. +DEBUNDLED = False + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. 
This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea of this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+    vendored_name = "{0}.{1}".format(__name__, modulename)
+
+    try:
+        __import__(modulename, globals(), locals(), level=0)
+    except ImportError:
+        # We can just silently allow import failures to pass here. If we
+        # got to this point it means that ``import pip._vendor.whatever``
+        # failed and so did ``import whatever``. Since we're importing this
+        # upfront in an attempt to alias imports, not erroring here will
+        # just mean we get a regular import error whenever pip *actually*
+        # tries to import one of these modules to use it, which actually
+        # gives us a better error message than we would have otherwise
+        # gotten.
+        pass
+    else:
+        sys.modules[vendored_name] = sys.modules[modulename]
+        base, head = vendored_name.rsplit(".", 1)
+        setattr(sys.modules[base], head, sys.modules[modulename])
+
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. This will cause all of this code to be a no-op typically
+# however downstream redistributors can enable it in a consistent way across
+# all platforms.
+if DEBUNDLED:
+    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+    # front of our sys.path.
+    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+    # Actually alias all of our vendored dependencies.
+    vendored("cachecontrol")
+    vendored("certifi")
+    vendored("colorama")
+    vendored("distlib")
+    vendored("distro")
+    vendored("html5lib")
+    vendored("six")
+    vendored("six.moves")
+    vendored("six.moves.urllib")
+    vendored("six.moves.urllib.parse")
+    vendored("packaging")
+    vendored("packaging.version")
+    vendored("packaging.specifiers")
+    vendored("pep517")
+    vendored("pkg_resources")
+    vendored("platformdirs")
+    vendored("progress")
+    vendored("requests")
+    vendored("requests.exceptions")
+    vendored("requests.packages")
+    vendored("requests.packages.urllib3")
+    vendored("requests.packages.urllib3._collections")
+    vendored("requests.packages.urllib3.connection")
+    vendored("requests.packages.urllib3.connectionpool")
+    vendored("requests.packages.urllib3.contrib")
+    vendored("requests.packages.urllib3.contrib.ntlmpool")
+    vendored("requests.packages.urllib3.contrib.pyopenssl")
+    vendored("requests.packages.urllib3.exceptions")
+    vendored("requests.packages.urllib3.fields")
+    vendored("requests.packages.urllib3.filepost")
+    vendored("requests.packages.urllib3.packages")
+    vendored("requests.packages.urllib3.packages.ordered_dict")
+    vendored("requests.packages.urllib3.packages.six")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("resolvelib") + vendored("tenacity") + vendored("tomli") + vendored("urllib3") diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..eabd49c91 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/distro.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/distro.cpython-310.pyc new file mode 100644 index 000000000..00d13403a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/distro.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/six.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/six.cpython-310.pyc new file mode 100644 index 000000000..779394835 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/six.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc new file mode 100644 index 000000000..530b960dd Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 000000000..8435d628d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,18 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.12.10" + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController + +import logging +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..468ce292d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc new file mode 100644 index 000000000..2a5885b26 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc new file mode 100644 index 000000000..131a029d8 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc new file mode 100644 index 000000000..14b37f48c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc new file mode 100644 index 000000000..54e79c5b4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc new file mode 100644 index 000000000..4d3a5a903 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc new file mode 100644 index 000000000..11c93e68f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc new file mode 100644 index 000000000..a57fc97e7 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc new file mode 100644 index 000000000..f85707754 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc new file mode 100644 index 000000000..2f63377f6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 000000000..4266b5ee9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 000000000..94c75e1a0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,137 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import types +import functools +import zlib + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController, PERMANENT_REDIRECT_STATUSES +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "PATCH", "DELETE"} + + def __init__( + self, + cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + cacheable_methods=None, + *args, + **kw + ): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = DictCache() if cache is None else cache + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, 
cache_etags=cache_etags, serializer=serializer + ) + + def send(self, request, cacheable_methods=None, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response( + self, request, response, from_cache=False, cacheable_methods=None + ): + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif int(response.status) in PERMANENT_REDIRECT_STATUSES: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. + response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, request, response + ), + ) + if response.chunked: + super_update_chunk_length = response._update_chunk_length + + def _update_chunk_length(self): + super_update_chunk_length() + if self.chunk_left == 0: + self._fp._close() + + response._update_chunk_length = types.MethodType( + _update_chunk_length, response + ) + + resp = super(CacheControlAdapter, self).build_response(request, response) + + # See if we should invalidate the cache. 
+ if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 000000000..44e4309d2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. +""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplementedError() + + def set(self, key, value, expires=None): + raise NotImplementedError() + + def delete(self, key): + raise NotImplementedError() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value, expires=None): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 000000000..44becd684 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,6 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from .file_cache import FileCache # noqa +from .redis_cache import RedisCache # noqa diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..2e5c77372 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc new file mode 100644 index 000000000..ba2eb57a5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc new file mode 100644 index 000000000..579dbf291 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 000000000..6cd1106f8 --- /dev/null +++ 
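[editor's note] adapter.py and cache.py above define the two extension points: send() consults the controller before hitting the network, build_response() caches or revalidates on the way back, and storage is anything implementing the BaseCache interface. An illustrative backend, assuming only the get/set/delete contract shown above (BoundedCache is a made-up name, not part of the diff):

from collections import OrderedDict
from threading import Lock

from pip._vendor.cachecontrol.cache import BaseCache


class BoundedCache(BaseCache):
    """A DictCache variant that evicts the oldest entry past maxsize."""

    def __init__(self, maxsize=128):
        self.lock = Lock()
        self.maxsize = maxsize
        self.data = OrderedDict()

    def get(self, key):
        # Like DictCache.get, reads are left unlocked.
        return self.data.get(key)

    def set(self, key, value, expires=None):
        with self.lock:
            self.data[key] = value
            self.data.move_to_end(key)
            while len(self.data) > self.maxsize:
                self.data.popitem(last=False)  # drop the oldest entry

    def delete(self, key):
        with self.lock:
            self.data.pop(key, None)

Usage would be CacheControlAdapter(cache=BoundedCache(256)) in place of the default DictCache.
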
b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,150 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import hashlib +import os +from textwrap import dedent + +from ..cache import BaseCache +from ..controller import CacheController + +try: + FileNotFoundError +except NameError: + # py2.X + FileNotFoundError = (IOError, OSError) + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. + if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. + fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + + def __init__( + self, + directory, + forever=False, + filemode=0o0600, + dirmode=0o0700, + use_dir_lock=None, + lock_class=None, + ): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + try: + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + lockfile installed. You can install it via pip: + pip install lockfile + """ + ) + raise ImportError(notice) + + else: + if use_dir_lock: + lock_class = MkdirLockFile + + elif lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. 
+ # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set(self, key, value, expires=None): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. + + This does not ensure the file exists! + """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 000000000..720b507c5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import division + +from datetime import datetime +from pip._vendor.cachecontrol.cache import BaseCache + + +class RedisCache(BaseCache): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.utcnow() + self.conn.setex(key, int(expires.total_seconds()), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 000000000..ccec9379d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,32 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + +# Handle the case where the requests module has been patched to not have +# urllib3 bundled as part of its source. 
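[editor's note] file_cache.py above derives the on-disk location from a SHA-224 digest: the first five hex characters become nested shard directories and the full digest is the filename (hence the warning that _fn must stay stable). A standalone sketch of the same layout:

import hashlib
import os


def cache_path(directory, key):
    # Mirrors FileCache._fn: five single-character shard directories,
    # then the full 56-character hex digest as the filename.
    hashed = hashlib.sha224(key.encode()).hexdigest()
    parts = list(hashed[:5]) + [hashed]
    return os.path.join(directory, *parts)


# cache_path("/tmp/cc", some_url) -> /tmp/cc/<h0>/<h1>/<h2>/<h3>/<h4>/<digest>

Also note that FileCache raises a descriptive ImportError unless the optional lockfile package is installed, and that RedisCache.set converts an absolute expires datetime into a relative TTL for SETEX.
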
+try: + from pip._vendor.requests.packages.urllib3.response import HTTPResponse +except ImportError: + from pip._vendor.urllib3.response import HTTPResponse + +try: + from pip._vendor.requests.packages.urllib3.util import is_fp_closed +except ImportError: + from pip._vendor.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = unicode +except NameError: + text_type = str diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 000000000..d7e738028 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,415 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +PERMANENT_REDIRECT_STATUSES = (301, 308) + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +class CacheController(object): + """An interface to see if request should cached or not. + """ + + def __init__( + self, cache=None, cache_etags=True, serializer=None, status_codes=None + ): + self.cache = DictCache() if cache is None else cache + self.cache_etags = cache_etags + self.serializer = serializer or Serializer() + self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308) + + @classmethod + def _urlnorm(cls, uri): + """Normalize the URL to create a safe key for the cache""" + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + + scheme = scheme.lower() + authority = authority.lower() + + if not path: + path = "/" + + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. 
+ request_uri = query and "?".join([path, query]) or path + defrag_uri = scheme + "://" + authority + request_uri + + return defrag_uri + + @classmethod + def cache_url(cls, uri): + return cls._urlnorm(uri) + + def parse_cache_control(self, headers): + known_directives = { + # https://tools.ietf.org/html/rfc7234#section-5.2 + "max-age": (int, True), + "max-stale": (int, False), + "min-fresh": (int, True), + "no-cache": (None, False), + "no-store": (None, False), + "no-transform": (None, False), + "only-if-cached": (None, False), + "must-revalidate": (None, False), + "public": (None, False), + "private": (None, False), + "proxy-revalidate": (None, False), + "s-maxage": (int, True), + } + + cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) + + retval = {} + + for cc_directive in cc_headers.split(","): + if not cc_directive.strip(): + continue + + parts = cc_directive.split("=", 1) + directive = parts[0].strip() + + try: + typ, required = known_directives[directive] + except KeyError: + logger.debug("Ignoring unknown cache-control directive: %s", directive) + continue + + if not typ or not required: + retval[directive] = None + if typ: + try: + retval[directive] = typ(parts[1].strip()) + except IndexError: + if required: + logger.debug( + "Missing value for cache-control " "directive: %s", + directive, + ) + except ValueError: + logger.debug( + "Invalid value for cache-control directive " "%s, must be %s", + directive, + typ.__name__, + ) + + return retval + + def cached_request(self, request): + """ + Return a cached response if it exists in the cache, otherwise + return False. + """ + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if "no-cache" in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if "max-age" in cc and cc["max-age"] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Request allows serving from the cache, let's see if we find something + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return False + + # Check whether it can be deserialized + resp = self.serializer.loads(request, cache_data) + if not resp: + logger.warning("Cache entry deserialization failed, entry ignored") + return False + + # If we have a cached permanent redirect, return it immediately. We + # don't need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. + # + # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if int(resp.status) in PERMANENT_REDIRECT_STATUSES: + msg = ( + "Returning cached permanent redirect response " + "(ignoring date and etag information)" + ) + logger.debug(msg) + return resp + + headers = CaseInsensitiveDict(resp.headers) + if not headers or "date" not in headers: + if "etag" not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. 
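[editor's note] With _urlnorm/cache_url and parse_cache_control now fully in view, their expected behaviour (a sketch, using the vendored import path):

from pip._vendor.cachecontrol.controller import CacheController

cc = CacheController()

print(CacheController.cache_url("HTTP://Example.COM/a?b=1#frag"))
# -> "http://example.com/a?b=1"  (scheme/host lowercased, fragment dropped)

print(cc.parse_cache_control({"cache-control": "max-age=600, no-store, bogus"}))
# -> {"max-age": 600, "no-store": None}  (unknown directives are logged and skipped)
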
+ logger.debug("Purging cached response: no date or etag") + self.cache.delete(cache_url) + logger.debug("Ignoring cached response: no date") + return False + + now = time.time() + date = calendar.timegm(parsedate_tz(headers["date"])) + current_age = max(0, now - date) + logger.debug("Current age based on date: %i", current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + if "max-age" in resp_cc: + freshness_lifetime = resp_cc["max-age"] + logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif "expires" in headers: + expires = parsedate_tz(headers["expires"]) + if expires is not None: + expire_time = calendar.timegm(expires) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. + if "max-age" in cc: + freshness_lifetime = cc["max-age"] + logger.debug( + "Freshness lifetime from request max-age: %i", freshness_lifetime + ) + + if "min-fresh" in cc: + min_fresh = cc["min-fresh"] + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug("Adjusted current age from min-fresh: %i", current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug("%i > %i", freshness_lifetime, current_age) + return resp + + # we're not fresh. If we don't have an Etag, clear it out + if "etag" not in headers: + logger.debug('The cached response is "stale" with no etag, purging') + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request): + cache_url = self.cache_url(request.url) + resp = self.serializer.loads(request, self.cache.get(cache_url)) + new_headers = {} + + if resp: + headers = CaseInsensitiveDict(resp.headers) + + if "etag" in headers: + new_headers["If-None-Match"] = headers["ETag"] + + if "last-modified" in headers: + new_headers["If-Modified-Since"] = headers["Last-Modified"] + + return new_headers + + def cache_response(self, request, response, body=None, status_codes=None): + """ + Algorithm for caching requests. + + This assumes a requests Response object. + """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = status_codes or self.cacheable_status_codes + if response.status not in cacheable_status_codes: + logger.debug( + "Status code %s not in %s", response.status, cacheable_status_codes + ) + return + + response_headers = CaseInsensitiveDict(response.headers) + + if "date" in response_headers: + date = calendar.timegm(parsedate_tz(response_headers["date"])) + else: + date = 0 + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. 
+ if ( + body is not None + and "content-length" in response_headers + and response_headers["content-length"].isdigit() + and int(response_headers["content-length"]) != len(body) + ): + return + + cc_req = self.parse_cache_control(request.headers) + cc = self.parse_cache_control(response_headers) + + cache_url = self.cache_url(request.url) + logger.debug('Updating cache with response from "%s"', cache_url) + + # Delete it from the cache if we happen to have it stored there + no_store = False + if "no-store" in cc: + no_store = True + logger.debug('Response header has "no-store"') + if "no-store" in cc_req: + no_store = True + logger.debug('Request header has "no-store"') + if no_store and self.cache.get(cache_url): + logger.debug('Purging existing cache entry to honor "no-store"') + self.cache.delete(cache_url) + if no_store: + return + + # https://tools.ietf.org/html/rfc7234#section-4.1: + # A Vary header field-value of "*" always fails to match. + # Storing such a response leads to a deserialization warning + # during cache lookup and is not allowed to ever be served, + # so storing it can be avoided. + if "*" in response_headers.get("vary", ""): + logger.debug('Response header has "Vary: *"') + return + + # If we've been given an etag, then keep the response + if self.cache_etags and "etag" in response_headers: + expires_time = 0 + if response_headers.get("expires"): + expires = parsedate_tz(response_headers["expires"]) + if expires is not None: + expires_time = calendar.timegm(expires) - date + + expires_time = max(expires_time, 14 * 86400) + + logger.debug("etag object cached for {0} seconds".format(expires_time)) + logger.debug("Caching due to etag") + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body), + expires=expires_time, + ) + + # Add to the cache any permanent redirects. We do this before looking + # that the Date headers. + elif int(response.status) in PERMANENT_REDIRECT_STATUSES: + logger.debug("Caching permanent redirect") + self.cache.set(cache_url, self.serializer.dumps(request, response, b"")) + + # Add to the cache if the response headers demand it. If there + # is no date header then we can't do anything about expiring + # the cache. + elif "date" in response_headers: + date = calendar.timegm(parsedate_tz(response_headers["date"])) + # cache when there is a max-age > 0 + if "max-age" in cc and cc["max-age"] > 0: + logger.debug("Caching b/c date exists and max-age > 0") + expires_time = cc["max-age"] + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body), + expires=expires_time, + ) + + # If the request can expire, it means we should cache it + # in the meantime. + elif "expires" in response_headers: + if response_headers["expires"]: + expires = parsedate_tz(response_headers["expires"]) + if expires is not None: + expires_time = calendar.timegm(expires) - date + else: + expires_time = None + + logger.debug( + "Caching b/c of expires header. expires in {0} seconds".format( + expires_time + ) + ) + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + expires=expires_time, + ) + + def update_cached_response(self, request, response): + """On a 304 we will get a new set of headers that we want to + update our cached value with, assuming we have one. + + This should only ever be called when we've sent an ETag and + gotten a 304 as the response. 
+ """ + cache_url = self.cache_url(request.url) + + cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. + excluded_headers = ["content-length"] + + cached_response.headers.update( + dict( + (k, v) + for k, v in response.headers.items() + if k.lower() not in excluded_headers + ) + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self.cache.set(cache_url, self.serializer.dumps(request, cached_response)) + + return cached_response diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 000000000..f5ed5f6f6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,111 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from tempfile import NamedTemporaryFile +import mmap + + +class CallbackFileWrapper(object): + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + + The data is stored in a temporary file until it is all available. As long + as the temporary files directory is disk-based (sometimes it's a + memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory + pressure is high. For small files the disk usually won't be used at all, + it'll all be in the filesystem memory cache, so there should be no + performance impact. + """ + + def __init__(self, fp, callback): + self.__buf = NamedTemporaryFile("rb+", delete=True) + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name): + # The vaguaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop thigns from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__("_CallbackFileWrapper__fp") + return getattr(fp, name) + + def __is_fp_closed(self): + try: + return self.__fp.fp is None + + except AttributeError: + pass + + try: + return self.__fp.closed + + except AttributeError: + pass + + # We just don't cache it then. + # TODO: Add some logging here... + return False + + def _close(self): + if self.__callback: + if self.__buf.tell() == 0: + # Empty file: + result = b"" + else: + # Return the data without actually loading it into memory, + # relying on Python's buffer API and mmap(). mmap() just gives + # a view directly into the filesystem's memory cache, so it + # doesn't result in duplicate memory use. 
+ self.__buf.seek(0, 0) + result = memoryview( + mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ) + ) + self.__callback(result) + + # We assign this to None here, because otherwise we can get into + # really tricky problems where the CPython interpreter dead locks + # because the callback is holding a reference to something which + # has a __del__ method. Setting this to None breaks the cycle + # and allows the garbage collector to do it's thing normally. + self.__callback = None + + # Closing the temporary file releases memory and frees disk space. + # Important when caching big files. + self.__buf.close() + + def read(self, amt=None): + data = self.__fp.read(amt) + if data: + # We may be dealing with b'', a sign that things are over: + # it's passed e.g. after we've already closed self.__buf. + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data + + def _safe_read(self, amt): + data = self.__fp._safe_read(amt) + if amt == 2 and data == b"\r\n": + # urllib executes this read to toss the CRLF at the end + # of the chunk. + return data + + self.__buf.write(data) + if self.__is_fp_closed(): + self._close() + + return data diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 000000000..ebe4a96f5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py @@ -0,0 +1,139 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import calendar +import time + +from email.utils import formatdate, parsedate, parsedate_tz + +from datetime import datetime, timedelta + +TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" + + +def expire_after(delta, date=None): + date = date or datetime.utcnow() + return date + delta + + +def datetime_to_header(dt): + return formatdate(calendar.timegm(dt.timetuple())) + + +class BaseHeuristic(object): + + def warning(self, response): + """ + Return a valid 1xx warning header value describing the cache + adjustments. + + The response is provided too allow warnings like 113 + http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need + to explicitly say response is over 24 hours old. + """ + return '110 - "Response is Stale"' + + def update_headers(self, response): + """Update the response headers with any new headers. + + NOTE: This SHOULD always include some Warning header to + signify that the response was cached by the client, not + by way of the provided headers. + """ + return {} + + def apply(self, response): + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({"Warning": warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + + def update_headers(self, response): + headers = {} + + if "expires" not in response.headers: + date = parsedate(response.headers["date"]) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + headers["expires"] = datetime_to_header(expires) + headers["cache-control"] = "public" + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. 
+ """ + + def __init__(self, **kw): + self.delta = timedelta(**kw) + + def update_headers(self, response): + expires = expire_after(self.delta) + return {"expires": datetime_to_header(expires), "cache-control": "public"} + + def warning(self, response): + tmpl = "110 - Automatically cached for %s. Response might be stale" + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. + """ + cacheable_by_default_statuses = { + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + } + + def update_headers(self, resp): + headers = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers["date"])) + last_modified = parsedate(headers["last-modified"]) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 000000000..b075df186 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,186 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import base64 +import io +import json +import zlib + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +_default_body_read = object() + + +class Serializer(object): + def dumps(self, request, response, body=None): + response_headers = CaseInsensitiveDict(response.headers) + + if body is None: + # When a body isn't passed in, we'll read the response. We + # also update the response with a new file handler to be + # sure it acts as though it was never read. + body = response.read(decode_content=False) + response._fp = io.BytesIO(body) + + # NOTE: This is all a bit weird, but it's really important that on + # Python 2.x these objects are unicode and not str, even when + # they contain only ascii. The problem here is that msgpack + # understands the difference between unicode and bytes and we + # have it set to differentiate between them, however Python 2 + # doesn't know the difference. Forcing these to unicode will be + # enough to have msgpack know the difference. 
+ data = { + u"response": { + u"body": body, + u"headers": dict( + (text_type(k), text_type(v)) for k, v in response.headers.items() + ), + u"status": response.status, + u"version": response.version, + u"reason": text_type(response.reason), + u"strict": response.strict, + u"decode_content": response.decode_content, + } + } + + # Construct our vary headers + data[u"vary"] = {} + if u"vary" in response_headers: + varied_headers = response_headers[u"vary"].split(",") + for header in varied_headers: + header = text_type(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = text_type(header_value) + data[u"vary"][header] = header_value + + return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{}".format(ver))(request, data) + + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
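[editor's note] The serializer's wire format is a version tag joined to a msgpack body; loads splits on the first comma and dispatches on the tag. A sketch of that envelope handling:

blob = b"cc=4," + b"<msgpack bytes>"       # shape of what dumps() returns
ver, payload = blob.split(b",", 1)         # -> b"cc=4", b"<msgpack bytes>"
assert ver.split(b"=", 1)[-1] == b"4"      # dispatched to _loads_v4

Data without a recognised cc= prefix falls back to version 0, which _loads_v0 deliberately treats as a miss, and any version lacking a _loads_vN method is likewise a miss, so stale-format entries simply get rewritten as v4.
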
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except (ValueError, zlib.error): + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) + + def _loads_v3(self, request, data): + # Due to Python 2 encoding issues, it's impossible to know for sure + # exactly how to load v3 entries, thus we'll treat these as a miss so + # that they get rewritten out as v4 entries. + return + + def _loads_v4(self, request, data): + try: + cached = msgpack.loads(data, raw=False) + except ValueError: + return + + return self.prepare_response(request, cached) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 000000000..b6ee7f203 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,33 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl( + sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None, + controller_class=None, + adapter_class=None, + cacheable_methods=None, +): + + cache = DictCache() if cache is None else cache + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + return sess diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__init__.py new file mode 100644 index 000000000..8db1a0e55 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__init__.py @@ -0,0 +1,3 @@ +from .core import contents, where + +__version__ = "2021.10.08" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__main__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__main__.py new file mode 100644 index 000000000..00376349e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from pip._vendor.certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..536c034a8 Binary files /dev/null and 
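[editor's note] wrapper.py above is the public entry point: CacheControl(sess) just mounts a configured CacheControlAdapter for both schemes, equivalent to what _cmd.get_session() does by hand earlier in this diff. A minimal sketch:

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl

sess = CacheControl(requests.Session())
sess.get("https://example.com/")   # fetched over the network and, if cacheable, stored
sess.get("https://example.com/")   # may now be answered from the in-memory DictCache
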
b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 000000000..575985d8d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-310.pyc new file mode 100644 index 000000000..214916de0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/cacert.pem b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/cacert.pem new file mode 100644 index 000000000..6d0ccc0d1 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/cacert.pem @@ -0,0 +1,4362 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp 
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: 
"Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t 
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv 
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T 
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y 
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ 
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L 
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
+# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 
Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC 
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t 
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE 
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP 
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: 
ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. 
+# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. 
OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- 
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO 
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ 
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 
4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO 
+xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp 
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df 
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z +XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj +x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout +736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: 
"Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/core.py 
b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/core.py new file mode 100644 index 000000000..f8d4313d3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/certifi/core.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- + +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import os + + +class _PipPatchedCertificate(Exception): + pass + + +DEBIAN_CA_CERTS_PATH = '/etc/ssl/certs/ca-certificates.crt' + +try: + # Return a certificate file on disk for a standalone pip zipapp running in + # an isolated build environment to use. Passing --cert to the standalone + # pip does not work since requests calls where() unconditionally on import. + _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") + if _PIP_STANDALONE_CERT: + def where(): + return _PIP_STANDALONE_CERT + raise _PipPatchedCertificate() + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where(): + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_PATH = DEBIAN_CA_CERTS_PATH + + return _CACERT_PATH + +except _PipPatchedCertificate: + pass + +except ImportError: + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text(_module, _path, encoding="ascii"): + with open(where(), "r", encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where(): + return DEBIAN_CA_CERTS_PATH + + +def contents(): + with open(where(), "r", encoding="ascii") as data: + return data.read() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__init__.py new file mode 100644 index 000000000..80ad2546d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__init__.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
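# A minimal sketch (not the vendored implementation above, which is
# Debian-patched to cache DEBIAN_CA_CERTS_PATH directly) of the
# delayed-extraction pattern that the long comment in where() describes:
# enter the importlib.resources context manager once, keep it referenced
# at module scope so its cleanup never runs while the path is in use,
# and cache the resulting path. Assumes upstream certifi's layout with a
# bundled cacert.pem resource; names with a _SKETCH/sketch_ prefix are
# illustrative, not part of the shipped module.
from importlib.resources import path as get_path

_SKETCH_CACERT_CTX = None
_SKETCH_CACERT_PATH = None

def sketch_where():
    global _SKETCH_CACERT_CTX, _SKETCH_CACERT_PATH
    if _SKETCH_CACERT_PATH is None:
        # get_path() returns a context manager; __enter__() yields the
        # on-disk path, extracting the resource to a temporary file only
        # when the package lives inside a zip. Holding _SKETCH_CACERT_CTX
        # at module scope keeps __exit__() (and any temp-file cleanup)
        # from running while callers still hold the returned path.
        _SKETCH_CACERT_CTX = get_path("certifi", "cacert.pem")
        _SKETCH_CACERT_PATH = str(_SKETCH_CACERT_CTX.__enter__())
    return _SKETCH_CACERT_PATH

# Typical consumption of the bundle path (illustrative):
#   import ssl; ctx = ssl.create_default_context(cafile=sketch_where())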
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .universaldetector import UniversalDetector +from .enums import InputState +from .version import __version__, VERSION + + +__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION'] + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() + + +def detect_all(byte_str): + """ + Detect all the possible encodings of the given byte string. + + :param byte_str: The byte sequence to examine. + :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + + detector = UniversalDetector() + detector.feed(byte_str) + detector.close() + + if detector._input_state == InputState.HIGH_BYTE: + results = [] + for prober in detector._charset_probers: + if prober.get_confidence() > detector.MINIMUM_THRESHOLD: + charset_name = prober.charset_name + lower_charset_name = prober.charset_name.lower() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if detector._has_win_bytes: + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + results.append({ + 'encoding': charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language, + }) + if len(results) > 0: + return sorted(results, key=lambda result: -result['confidence']) + + return [detector.result] diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..bfabb0990 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc new file mode 100644 index 000000000..bd2f97578 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc new file mode 100644 index 000000000..3904e3c2e Binary files /dev/null and 
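# A short usage sketch for the detect()/detect_all() helpers defined in
# the vendored chardet/__init__.py above. The import path assumes pip's
# vendored copy as added by this diff; the sample bytes and the printed
# results are illustrative, not guaranteed output.
from pip._vendor import chardet

sample = "Привет, мир".encode("windows-1251")
print(chardet.detect(sample))
# -> a single best guess, e.g. {'encoding': ..., 'confidence': ..., 'language': ...}
print(chardet.detect_all(sample))
# -> one dict per prober scoring above MINIMUM_THRESHOLD,
#    sorted by descending confidence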
b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc new file mode 100644 index 000000000..f039c2e7d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc new file mode 100644 index 000000000..6c30c004f Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc new file mode 100644 index 000000000..c1e69dc4c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc new file mode 100644 index 000000000..364c73771 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc new file mode 100644 index 000000000..b115bb394 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc new file mode 100644 index 000000000..c8420bf4e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc new file mode 100644 index 000000000..d5660ec3e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc new file mode 100644 index 000000000..e88d688ba Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc new file mode 100644 index 000000000..d18e3377e Binary files /dev/null and 
b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc new file mode 100644 index 000000000..d3d56d5d6 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc new file mode 100644 index 000000000..02183a99d Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc new file mode 100644 index 000000000..fc7a0caaf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc new file mode 100644 index 000000000..adb315fb0 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc new file mode 100644 index 000000000..b1ebfecb7 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc new file mode 100644 index 000000000..813a4a2b3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc new file mode 100644 index 000000000..ffc8eaf2c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc new file mode 100644 index 000000000..6e6bd0c0a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-310.pyc new file mode 100644 index 000000000..5a870dffc Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc new file mode 100644 index 000000000..7d6d1e011 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc new file mode 100644 index 000000000..f7670eab5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc new file mode 100644 index 000000000..8946e3904 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc new file mode 100644 index 000000000..ff9aa6a06 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc new file mode 100644 index 000000000..5dc1a9374 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-310.pyc new file mode 100644 index 000000000..36dfbaa41 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc new file mode 100644 index 000000000..851230716 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc new file mode 100644 index 000000000..340ac2016 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc new file mode 100644 index 000000000..b3ecb3e80 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc new file mode 100644 index 000000000..ecad870cf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc new file mode 100644 index 000000000..4299d2f28 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc new file mode 100644 index 000000000..d2e47f49a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc new file mode 100644 index 000000000..d3cc0244a Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc new file mode 100644 index 000000000..bced29482 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc new file mode 100644 index 000000000..1c56f7ada Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc new file mode 100644 index 000000000..07354faef Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc new file mode 100644 index 000000000..2b6166cf9 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc new file mode 100644 index 000000000..e6467bf7c Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5freq.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5freq.py new file mode 100644 index 000000000..38f32517a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 
811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 
173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 
862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 +1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 +4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 
4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 +5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 +4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git 
a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5prober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5prober.py new file mode 100644 index 000000000..98f997012 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/chardistribution.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/chardistribution.py new file mode 100644 index 000000000..c0395f4a4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
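A quick orientation, since prober classes like the Big5Prober above are never called directly by applications: they are registered inside chardet's UniversalDetector and exercised through the package's public detect() helper. A minimal usage sketch (an illustrative addition, not part of the patch; it assumes the vendored chardet added by this diff is importable through pip's _vendor namespace):

    from pip._vendor.chardet import detect

    # Big5-encoded sample text; the Big5 prober should win the probe
    sample = "繁體中文的編碼偵測".encode("big5")
    print(detect(sample))
    # Expected shape: {'encoding': 'Big5', 'confidence': <float>, 'language': 'Chinese'}
    # The exact confidence depends on the frequency tables above.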
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. + self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. 
+        return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(EUCTWDistributionAnalysis, self).__init__()
+        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCTW_TABLE_SIZE
+        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-TW encoding, we are interested
+        #   first  byte range: 0xc4 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xC4:
+            return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
+        else:
+            return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(EUCKRDistributionAnalysis, self).__init__()
+        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+        self._table_size = EUCKR_TABLE_SIZE
+        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-KR encoding, we are interested
+        #   first  byte range: 0xb0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char = byte_str[0]
+        if first_char >= 0xB0:
+            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
+        else:
+            return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(GB2312DistributionAnalysis, self).__init__()
+        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+        self._table_size = GB2312_TABLE_SIZE
+        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for GB2312 encoding, we are interested
+        #   first  byte range: 0xb0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if (first_char >= 0xB0) and (second_char >= 0xA1):
+            return 94 * (first_char - 0xB0) + second_char - 0xA1
+        else:
+            return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(Big5DistributionAnalysis, self).__init__()
+        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+        self._table_size = BIG5_TABLE_SIZE
+        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for big5 encoding, we are interested
+        #   first  byte range: 0xa4 -- 0xfe
+        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if first_char >= 0xA4:
+            if second_char >= 0xA1:
+                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+            else:
+                return 157 * (first_char - 0xA4) + second_char - 0x40
+        else:
+            return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(SJISDistributionAnalysis, self).__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for sjis encoding, we are interested
+        #   first  byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+        #   second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
+        # no validation needed here. State machine has done that
+        first_char, second_char = byte_str[0], byte_str[1]
+        if (first_char >= 0x81) and (first_char <= 0x9F):
+            order = 188 * (first_char - 0x81)
+        elif (first_char >= 0xE0) and (first_char <= 0xEF):
+            order = 188 * (first_char - 0xE0 + 31)
+        else:
+            return -1
+        order = order + second_char - 0x40
+        if second_char > 0x7F:
+            order = -1
+        return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+    def __init__(self):
+        super(EUCJPDistributionAnalysis, self).__init__()
+        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+        self._table_size = JIS_TABLE_SIZE
+        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+    def get_order(self, byte_str):
+        # for euc-JP encoding, we are interested
+        #   first  byte range: 0xa0 -- 0xfe
+        #   second byte range: 0xa1 -- 0xfe
+        # no validation needed here. State machine has done that
+        char = byte_str[0]
+        if char >= 0xA0:
+            return 94 * (char - 0xA1) + byte_str[1] - 0xa1
+        else:
+            return -1
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetgroupprober.py
new file mode 100644
index 000000000..5812cef0b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetgroupprober.py
@@ -0,0 +1,107 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
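To make the get_order() arithmetic above concrete: Big5 has 157 legal second-byte slots per lead byte (63 in 0x40-0x7E plus 94 in 0xA1-0xFE), so the ideograph 一, encoded as 0xA4 0x40, lands on order 0. A small sketch (illustrative, not part of the patch):

    from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis

    analysis = Big5DistributionAnalysis()
    pair = "一".encode("big5")            # b'\xa4@', i.e. bytes([0xA4, 0x40])
    print(analysis.get_order(pair))       # 0 == 157 * (0xA4 - 0xA4) + 0x40 - 0x40
    analysis.feed(pair, 2)
    # With only one character seen, MINIMUM_DATA_THRESHOLD keeps the
    # confidence pinned at SURE_NO (0.01) until more data is fed.
    print(analysis.get_confidence())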
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + self._state = ProbingState.FOUND_IT + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetprober.py new file mode 100644 index 000000000..eac4e5986 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. + words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. + """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... 
+        if not in_tag:
+            # Keep everything after last non-extended-ASCII, non-alphabetic
+            # character
+            filtered.extend(buf[prev:])
+
+        return filtered
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__init__.py
@@ -0,0 +1 @@
+
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..26bb4544d
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-310.pyc
new file mode 100644
index 000000000..1199638cc
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/chardetect.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/chardetect.py
new file mode 100644
index 000000000..6d6f93aab
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cli/chardetect.py
@@ -0,0 +1,84 @@
+"""
+Script which takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+    % chardetect somefile someotherfile
+    somefile: windows-1252 with confidence 0.5
+    someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import sys
+
+from pip._vendor.chardet import __version__
+from pip._vendor.chardet.compat import PY2
+from pip._vendor.chardet.universaldetector import UniversalDetector
+
+
+def description_of(lines, name='stdin'):
+    """
+    Return a string describing the probable encoding of a file or
+    list of strings.
+
+    :param lines: The lines to get the encoding of.
+    :type lines: Iterable of bytes
+    :param name: Name of file or collection of lines
+    :type name: str
+    """
+    u = UniversalDetector()
+    for line in lines:
+        line = bytearray(line)
+        u.feed(line)
+        # shortcut out of the loop to save reading further - particularly useful if we read a BOM.
+        if u.done:
+            break
+    u.close()
+    result = u.result
+    if PY2:
+        name = name.decode(sys.getfilesystemencoding(), 'ignore')
+    if result['encoding']:
+        return '{}: {} with confidence {}'.format(name, result['encoding'],
+                                                  result['confidence'])
+    else:
+        return '{}: no result'.format(name)
+
+
+def main(argv=None):
+    """
+    Handles command line arguments and gets things started.
+
+    :param argv: List of arguments, as if specified on the command-line.
+                 If None, ``sys.argv[1:]`` is used instead.
+    :type argv: list of str
+    """
+    # Get command line arguments
+    parser = argparse.ArgumentParser(
+        description="Takes one or more file paths and reports their detected \
+                     encodings")
+    parser.add_argument('input',
+                        help='File whose encoding we would like to determine. \
+                              (default: stdin)',
+                        type=argparse.FileType('rb'), nargs='*',
+                        default=[sys.stdin if PY2 else sys.stdin.buffer])
+    parser.add_argument('--version', action='version',
+                        version='%(prog)s {}'.format(__version__))
+    args = parser.parse_args(argv)
+
+    for f in args.input:
+        if f.isatty():
+            print("You are running chardetect interactively. Press " +
+                  "CTRL-D twice at the start of a blank line to signal the " +
                  "end of your input. If you want help, run chardetect " +
+                  "--help\n", file=sys.stderr)
+        print(description_of(f, f.name))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/codingstatemachine.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/codingstatemachine.py
new file mode 100644
index 000000000..68fba44f1
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/codingstatemachine.py
@@ -0,0 +1,88 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .enums import MachineState
+
+
+class CodingStateMachine(object):
+    """
+    A state machine to verify a byte sequence for a particular encoding. For
+    each byte the detector receives, it will feed that byte to every active
+    state machine available, one byte at a time. The state machine changes its
+    state based on its previous state and the byte it receives. There are 3
+    states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or a legal byte sequence
+                 (i.e. a valid code point) for a character has been identified.
+
+    ME state: This indicates that the state machine identified a byte sequence
+              that is specific to the charset it is designed for and that
+              there is no other possible encoding which can contain this byte
+              sequence. This will lead to an immediate positive answer for
+              the detector.
+
+    ERROR state: This indicates the state machine identified an illegal byte
+                 sequence for that encoding. This will lead to an immediate
+                 negative answer for this encoding. Detector will exclude this
+                 encoding from consideration from here on.
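The transition arithmetic lives in next_state() below: the next state is looked up at index curr_state * class_factor + byte_class. For example, the HZ-GB-2312 tables defined later in this diff (escsm.py) classify "~" (0x7E) as class 2 and "{" (0x7B) as class 4, so feeding the HZ escape pair b"~{" walks START -> 3 -> 4, an intermediate state rather than ERROR or ITS_ME. A sketch (illustrative, not part of the patch):

    from pip._vendor.chardet.codingstatemachine import CodingStateMachine
    from pip._vendor.chardet.enums import MachineState
    from pip._vendor.chardet.escsm import HZ_SM_MODEL

    sm = CodingStateMachine(HZ_SM_MODEL)
    for byte in b"~{":
        state = sm.next_state(byte)
    # Neither ERROR nor ITS_ME: the sequence is legal HZ so far.
    print(state, state in (MachineState.ERROR, MachineState.ITS_ME))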
+ """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/compat.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/compat.py new file mode 100644 index 000000000..8941572b3 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/compat.py @@ -0,0 +1,36 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + string_types = (str, unicode) + text_type = unicode + iteritems = dict.iteritems +else: + PY2 = False + PY3 = True + string_types = (bytes, str) + text_type = str + iteritems = dict.items diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cp949prober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cp949prober.py new file mode 100644 index 000000000..efd793abc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/enums.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/enums.py new file mode 100644 index 000000000..045120722 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. + """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escprober.py new file mode 100644 index 000000000..c70493f2b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
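The LanguageFilter values above are bit flags, which is why EscCharSetProber below can test expressions like `self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED` to decide which escape state machines to build. A short sketch (illustrative, not part of the patch):

    from pip._vendor.chardet.enums import LanguageFilter

    # Composite filters are unions of the individual bits
    assert LanguageFilter.CHINESE == (LanguageFilter.CHINESE_SIMPLIFIED |
                                      LanguageFilter.CHINESE_TRADITIONAL)   # 0x03
    assert LanguageFilter.CJK == (LanguageFilter.CHINESE |
                                  LanguageFilter.JAPANESE |
                                  LanguageFilter.KOREAN)                    # 0x0F

    # A prober tests one capability bit with bitwise AND
    wanted = LanguageFilter.CHINESE | LanguageFilter.KOREAN
    print(bool(wanted & LanguageFilter.JAPANESE))  # False: Japanese machines skipped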
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escsm.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escsm.py new file mode 100644 index 000000000..0069523a0 --- /dev/null +++ 
b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 
+0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/eucjpprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/eucjpprober.py new file mode 100644 index 000000000..20ce8f7d1 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The 
Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import ProbingState, MachineState
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJP_SM_MODEL
+
+
+class EUCJPProber(MultiByteCharSetProber):
+    def __init__(self):
+        super(EUCJPProber, self).__init__()
+        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+        self.distribution_analyzer = EUCJPDistributionAnalysis()
+        self.context_analyzer = EUCJPContextAnalysis()
+        self.reset()
+
+    def reset(self):
+        super(EUCJPProber, self).reset()
+        self.context_analyzer.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-JP"
+
+    @property
+    def language(self):
+        return "Japanese"
+
+    def feed(self, byte_str):
+        for i in range(len(byte_str)):
+            # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
+            coding_state = self.coding_sm.next_state(byte_str[i])
+            if coding_state == MachineState.ERROR:
+                self.logger.debug('%s %s prober hit error at byte %s',
+                                  self.charset_name, self.language, i)
+                self._state = ProbingState.NOT_ME
+                break
+            elif coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            elif coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte_str[0]
+                    self.context_analyzer.feed(self._last_char, char_len)
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.context_analyzer.feed(byte_str[i - 1:i + 1],
+                                               char_len)
+                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+                                                    char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if (self.context_analyzer.got_enough_data() and
+                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self):
+        context_conf = self.context_analyzer.get_confidence()
+        distrib_conf = self.distribution_analyzer.get_confidence()
+        return max(context_conf, distrib_conf)
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrfreq.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrfreq.py
new file mode 100644
index 000000000..b68078cb9
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrfreq.py
@@ -0,0 +1,195 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampling from about 20M text materials, including literature and computer technology
+
+# 128 --> 0.79
+# 256 --> 0.92
+# 512 --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+EUCKR_CHAR_TO_FREQ_ORDER = (
+ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
+ 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, +2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 
950,2398,2399,2400,2401,1504, +1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrprober.py new file mode 100644 index 000000000..345a060d0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. 
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import EUCKR_SM_MODEL
+
+
+class EUCKRProber(MultiByteCharSetProber):
+    def __init__(self):
+        super(EUCKRProber, self).__init__()
+        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
+        self.distribution_analyzer = EUCKRDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-KR"
+
+    @property
+    def language(self):
+        return "Korean"
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwfreq.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwfreq.py
new file mode 100644
index 000000000..ed7a995a3
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwfreq.py
@@ -0,0 +1,387 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from big5 work
+# by Taiwan's Mandarin Promotion Council
+#
+
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 5376
+
+EUCTW_CHAR_TO_FREQ_ORDER = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
+ 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
+3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
+3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
+ 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
+2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
+2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
+ 314,2615,2775,4308,2330,2331,
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 +7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 +2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 +2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 +7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 +1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwprober.py new file mode 100644 index 000000000..35669cc4d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312freq.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312freq.py new file mode 100644 index 000000000..697837bd9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
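The EUCTWProber added above is the complete EUC-TW prober: a CodingStateMachine validates byte sequences while EUCTWDistributionAnalysis scores character frequencies against the table that precedes it. A minimal sketch of driving such a prober directly, assuming pip's vendored chardet package is importable from this environment (pip itself goes through chardet's UniversalDetector; the input file name here is hypothetical):

    # Sketch only; assumes the vendored package above is on sys.path.
    from pip._vendor.chardet.euctwprober import EUCTWProber

    prober = EUCTWProber()
    with open("sample.txt", "rb") as f:   # hypothetical input file
        prober.feed(f.read())
    print(prober.charset_name)            # always "EUC-TW" for this prober
    print(prober.get_confidence())        # 0.01 .. 0.99 from the analyzer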
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# GB2312 most frequently used character table
+#
+# Char to FreqOrder table, from hz6763
+
+# 512 --> 0.79 -- 0.79
+# 1024 --> 0.92 -- 0.13
+# 2048 --> 0.98 -- 0.06
+# 6768 --> 1.00 -- 0.02
+#
+# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
+# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
+#
+# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
+
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
+
+GB2312_TABLE_SIZE = 3760
+
+GB2312_CHAR_TO_FREQ_ORDER = (
+1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
+2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
+2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
+ 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
+1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
+ 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
+1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
+2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
+3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
+ 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
+1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
+ 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
+2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
+ 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
+2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
+1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
+3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
+ 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
+1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
+ 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
+2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
+1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
+3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
+1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
+2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
+1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
+ 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
+3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
+3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
+ 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
+3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
+ 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
+1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, +1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, +1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312prober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312prober.py new file mode 100644 index 000000000..8446d2dd9 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
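The distribution-ratio constants quoted in the gb2312freq.py header above check out; a quick sanity computation (numbers taken from those comments, not recomputed from any corpus):

    coverage_512 = 0.79135                     # share of text covered by the top 512 chars
    ideal = coverage_512 / (1 - coverage_512)  # frequent vs. non-frequent occurrences
    print(round(ideal, 2))                     # 3.79, as stated

    random_ratio = 512 / (3755 - 512)          # uniform text over 3755 GB2312 chars
    print(round(random_ratio, 3))              # 0.158; the comment truncates to 0.157

GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 sits between the two: roughly 25% of the ideal ratio, yet far above what randomly distributed characters would produce.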
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/hebrewprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/hebrewprober.py new file mode 100644 index 000000000..b0e1bf492 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). 
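+#
+# (For example, the four bytes \xf9 \xec \xe5 \xed -- shalom, spelled
+# shin-lamed-vav-final-mem -- decode to the same four letters under both
+# "ISO-8859-8" and "windows-1255"; as the next paragraphs explain, a
+# visual-Hebrew stream would carry the same word with its bytes in
+# reverse order, \xed \xe5 \xec \xf9.)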
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like this:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc."
+# Adding punctuation marks, numbers and English text to visual text is
+# naturally also "visual" and from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+#    backwards while line order is natural. For charset recognition purposes
+#    the line order is unimportant (In fact, for this implementation, even
+#    word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+#    specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+#    that contains special punctuation marks or diacritics is displayed with
+#    some unconverted characters showing as question marks. This problem might
+#    be corrected using another model prober for x-mac-hebrew. Because
+#    x-mac-hebrew texts are so rare, writing another model prober isn't
+#    worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision between
+# the two is made by the HebrewProber by combining final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the final-letter scores maintained and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
+
+class HebrewProber(CharSetProber):
+    # windows-1255 / ISO-8859-8 code points of interest
+    FINAL_KAF = 0xea
+    NORMAL_KAF = 0xeb
+    FINAL_MEM = 0xed
+    NORMAL_MEM = 0xee
+    FINAL_NUN = 0xef
+    NORMAL_NUN = 0xf0
+    FINAL_PE = 0xf3
+    NORMAL_PE = 0xf4
+    FINAL_TSADI = 0xf5
+    NORMAL_TSADI = 0xf6
+
+    # Minimum Visual vs Logical final letter score difference.
+    # If the difference is below this, don't rely solely on the final letter
+    # score distance.
+    MIN_FINAL_CHAR_DISTANCE = 5
+
+    # Minimum Visual vs Logical model score difference.
+    # If the difference is below this, don't rely at all on the model score
+    # distance.
+    MIN_MODEL_DISTANCE = 0.01
+
+    VISUAL_HEBREW_NAME = "ISO-8859-8"
+    LOGICAL_HEBREW_NAME = "windows-1255"
+
+    def __init__(self):
+        super(HebrewProber, self).__init__()
+        self._final_char_logical_score = None
+        self._final_char_visual_score = None
+        self._prev = None
+        self._before_prev = None
+        self._logical_prober = None
+        self._visual_prober = None
+        self.reset()
+
+    def reset(self):
+        self._final_char_logical_score = 0
+        self._final_char_visual_score = 0
+        # The last two characters seen in the previous buffer;
+        # self._prev and self._before_prev are initialized to space in order
+        # to simulate a word delimiter at the beginning of the data.
+        self._prev = ' '
+        self._before_prev = ' '
+        # These probers are owned by the group prober.
+
+    def set_model_probers(self, logicalProber, visualProber):
+        self._logical_prober = logicalProber
+        self._visual_prober = visualProber
+
+    def is_final(self, c):
+        return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
+                     self.FINAL_PE, self.FINAL_TSADI]
+
+    def is_non_final(self, c):
+        # The normal Tsadi is not a good Non-Final letter due to words like
+        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
+        # apostrophe is converted to a space in FilterWithoutEnglishLetters,
+        # causing the Non-Final tsadi to appear at the end of a word even
+        # though this is not the case in the original text.
+        # The letters Pe and Kaf rarely show the related problem of not being
+        # good Non-Final letters; words like 'Pop', 'Winamp' and 'Mubarak',
+        # for example, legally end with a Non-Final Pe or Kaf. However, the
+        # benefit of these letters as Non-Final letters outweighs the damage,
+        # since such words are quite rare.
+        return c in [self.NORMAL_KAF, self.NORMAL_MEM,
+                     self.NORMAL_NUN, self.NORMAL_PE]
+
+    def feed(self, byte_str):
+        # Final letter analysis for logical-visual decision.
+        # Look for evidence that the received buffer is either logical Hebrew
+        # or visual Hebrew.
+        # The following cases are checked:
+        # 1) A word longer than 1 letter, ending with a final letter. 
This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. +1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. + modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. 
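+        # (That is: give up only once both the logical and the visual model
+        # prober have reached NOT_ME; while either is still viable, keep
+        # returning DETECTING so feed() continues accumulating evidence.)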
+        if (self._logical_prober.state == ProbingState.NOT_ME) and \
+           (self._visual_prober.state == ProbingState.NOT_ME):
+            return ProbingState.NOT_ME
+        return ProbingState.DETECTING
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jisfreq.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jisfreq.py
new file mode 100644
index 000000000..83fc082b5
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and
+# computer technology sources.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP.
+# They are sorted in order of frequency. 
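Before the table itself, it is worth sketching how chardet's distribution analyzers consume tables like JIS_CHAR_TO_FREQ_ORDER below: a character whose frequency order in the table is below 512 counts as "frequent", and the frequent/non-frequent ratio is weighed against the typical ratio for the encoding. A simplified rendering of get_confidence() in the vendored chardistribution.py (function and argument names here are illustrative, not the vendored API):

    # freq_chars: characters seen whose table order is below 512
    # total_chars: all characters seen that appear in the table
    # typical_ratio: e.g. JIS_TYPICAL_DISTRIBUTION_RATIO (3.0) below, or
    # GB2312_TYPICAL_DISTRIBUTION_RATIO (0.9) earlier in this diff.
    def distribution_confidence(freq_chars, total_chars, typical_ratio):
        if total_chars <= 0 or freq_chars <= 3:    # too little data to judge
            return 0.01
        if total_chars != freq_chars:
            r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
            if r < 0.99:
                return r
        return 0.99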
+
+# 128 --> 0.77094
+# 256 --> 0.85710
+# 512 --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio is about 25% of the IDR
+
+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
+
+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368
+
+JIS_CHAR_TO_FREQ_ORDER = (
+  40,   1,   6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
+2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
+3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
+ 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
+ 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
+1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
+ 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
+5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
+ 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
+ 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
+ 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
+ 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
+ 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
+4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
+5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
+5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 +5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 +5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 +1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 +2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jpcntx.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jpcntx.py new file mode 100644 index 000000000..20044e4bc --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/jpcntx.py @@ -0,0 +1,233 @@ 
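A minimal usage sketch for the jpcntx.py file added below, not part of the diff itself: the module's context analysers are fed raw bytes and score adjacent hiragana pairs against the jp2CharContext frequency-category table, and the resulting confidence is consumed by the corresponding encoding probers. The sample string and the euc_jp codec choice here are illustrative assumptions, not taken from the diff.

from pip._vendor.chardet.jpcntx import EUCJPContextAnalysis

analyser = EUCJPContextAnalysis()
# Hypothetical input: a run of hiragana, so every adjacent pair gets scored.
sample = "これはひらがなのれんぞくです".encode("euc_jp")
analyser.feed(sample, len(sample))

# DONT_KNOW (-1) until more than MINIMUM_DATA_THRESHOLD hiragana pairs have
# been seen; afterwards the share of pairs outside frequency category 0.
print(analyser.get_confidence())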
+######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), +(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), +(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), +(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), +(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), +(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), +(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), +(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
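+        # In the loop below, get_order() yields the hiragana order of the
+        # character starting at position i (or -1 for non-hiragana) together
+        # with its length in bytes. Whenever two consecutive characters are
+        # both hiragana, the counter for the jp2CharContext frequency
+        # category of that pair is bumped, and analysis stops for good once
+        # MAX_REL_THRESHOLD pairs have been seen.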
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langbulgarianmodel.py new file mode 100644 index 000000000..e963a5097 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langbulgarianmodel.py @@ -0,0 +1,4650 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +BULGARIAN_LANG_MODEL = { + 63: { # 'e' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, 
# 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 45: { # '\xad' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 31: { # 'Ð' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 2, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 2, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 0, # 'и' + 26: 2, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 1, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 32: { # 'Б' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 2, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 35: { # 'Ð’' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 2, # 'Я' + 1: 
3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 2, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 43: { # 'Г' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 1, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 1, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 37: { # 'Д' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 2, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 44: { # 'Е' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 2, # 'Ф' + 49: 1, # 'Ð¥' + 53: 2, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 0, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 55: { # 'Ж' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 
'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 47: { # 'З' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 40: { # 'И' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 2, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 2, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 3, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 3, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 2, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 2, # 'Ñ‚' + 19: 0, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 1, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 59: { # 'Й' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 1, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' 
+ 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 33: { # 'К' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 46: { # 'Л' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 2, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 38: { # 'М' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 2, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 36: { # 'Ð' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' 
+ 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 1, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 41: { # 'О' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 1, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 1, # 'Й' + 33: 2, # 'К' + 46: 2, # 'Л' + 38: 2, # 'М' + 36: 2, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 1, # 'а' + 18: 2, # 'б' + 9: 2, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 1, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 0, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 2, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 2, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 1, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 2, # 'ч' + 27: 0, # 'ш' + 24: 2, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 30: { # 'П' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 2, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 2, # 'у' + 29: 1, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 39: { # 'Р' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 2, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 0, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 28: { # 'С' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 3, # 'Ð' + 32: 2, # 'Б' + 35: 2, # 'Ð’' + 43: 1, # 'Г' + 37: 2, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 
1, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 2, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 1, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 2, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 1, # 'н' + 4: 3, # 'о' + 13: 3, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 2, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 34: { # 'Т' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 2, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 2, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 1, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 1, # 'Ъ' + 60: 0, # 'Ю' + 56: 1, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 3, # 'о' + 13: 0, # 'п' + 7: 3, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 51: { # 'У' + 63: 0, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 2, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 1, # 'б' + 9: 2, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 2, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 1, # 'й' + 12: 2, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 2, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 2, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 2, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 48: { # 'Ф' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 2, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 1, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 2, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 
'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 49: { # 'Ð¥' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 1, # 'П' + 39: 1, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 1, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 2, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 2, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 53: { # 'Ц' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 2, # 'И' + 59: 0, # 'Й' + 33: 2, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 2, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 50: { # 'Ч' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 2, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 2, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 54: { # 'Ш' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 1, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 1, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, 
# 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 2, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 2, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 2, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 1, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 57: { # 'Щ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 1, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 1, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 2, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 2, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 0, # 'л' + 14: 0, # 'м' + 6: 0, # 'н' + 4: 1, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 1, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 1, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 1, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 61: { # 'Ъ' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 0, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 1, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 2, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 1, # 'П' + 39: 2, # 'Р' + 28: 1, # 'С' + 34: 1, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 1, # 'Ð¥' + 53: 1, # 'Ц' + 50: 1, # 'Ч' + 54: 1, # 'Ш' + 57: 1, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 0, # 'б' + 9: 0, # 'в' + 20: 0, # 'г' + 11: 0, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 0, # 'з' + 2: 0, # 'и' + 26: 0, # 'й' + 12: 0, # 'к' + 10: 1, # 'л' + 14: 0, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 0, # 'п' + 7: 1, # 'Ñ€' + 8: 0, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 0, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 60: { # 'Ю' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 1, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 1, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 0, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 1, # 'Р' + 28: 1, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 2, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 2, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 1, # 'м' + 6: 1, # 'н' + 4: 0, # 'о' + 13: 1, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 0, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 56: { # 'Я' + 63: 0, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 1, # 'Ð’' + 43: 1, # 'Г' + 37: 1, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' 
+ 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 1, # 'К' + 46: 1, # 'Л' + 38: 1, # 'М' + 36: 1, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 1, # 'С' + 34: 2, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 0, # 'а' + 18: 1, # 'б' + 9: 1, # 'в' + 20: 1, # 'г' + 11: 1, # 'д' + 3: 0, # 'е' + 23: 0, # 'ж' + 15: 1, # 'з' + 2: 1, # 'и' + 26: 1, # 'й' + 12: 1, # 'к' + 10: 1, # 'л' + 14: 2, # 'м' + 6: 2, # 'н' + 4: 0, # 'о' + 13: 2, # 'п' + 7: 1, # 'Ñ€' + 8: 1, # 'Ñ' + 5: 1, # 'Ñ‚' + 19: 0, # 'у' + 29: 0, # 'Ñ„' + 25: 1, # 'Ñ…' + 22: 0, # 'ц' + 21: 0, # 'ч' + 27: 1, # 'ш' + 24: 0, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 1: { # 'а' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 1, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 1, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 1, # 'а' + 18: 3, # 'б' + 9: 3, # 'в' + 20: 3, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 3, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 3, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 3, # 'м' + 6: 3, # 'н' + 4: 2, # 'о' + 13: 3, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 3, # 'у' + 29: 3, # 'Ñ„' + 25: 3, # 'Ñ…' + 22: 3, # 'ц' + 21: 3, # 'ч' + 27: 3, # 'ш' + 24: 3, # 'щ' + 17: 0, # 'ÑŠ' + 52: 0, # 'ÑŒ' + 42: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 18: { # 'б' + 63: 1, # 'e' + 45: 0, # '\xad' + 31: 0, # 'Ð' + 32: 0, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 0, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 0, # 'б' + 9: 3, # 'в' + 20: 1, # 'г' + 11: 2, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 1, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 1, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 1, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 0, # 'Ñ‚' + 19: 3, # 'у' + 29: 0, # 'Ñ„' + 25: 2, # 'Ñ…' + 22: 1, # 'ц' + 21: 1, # 'ч' + 27: 1, # 'ш' + 24: 3, # 'щ' + 17: 3, # 'ÑŠ' + 52: 1, # 'ÑŒ' + 42: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + 58: 0, # 'Ñ”' + 62: 0, # 'â„–' + }, + 9: { # 'в' + 63: 1, # 'e' + 45: 1, # '\xad' + 31: 0, # 'Ð' + 32: 1, # 'Б' + 35: 0, # 'Ð’' + 43: 0, # 'Г' + 37: 0, # 'Д' + 44: 0, # 'Е' + 55: 0, # 'Ж' + 47: 0, # 'З' + 40: 0, # 'И' + 59: 0, # 'Й' + 33: 0, # 'К' + 46: 0, # 'Л' + 38: 0, # 'М' + 36: 0, # 'Ð' + 41: 0, # 'О' + 30: 0, # 'П' + 39: 0, # 'Р' + 28: 0, # 'С' + 34: 0, # 'Т' + 51: 0, # 'У' + 48: 1, # 'Ф' + 49: 0, # 'Ð¥' + 53: 0, # 'Ц' + 50: 0, # 'Ч' + 54: 0, # 'Ш' + 57: 0, # 'Щ' + 61: 0, # 'Ъ' + 60: 0, # 'Ю' + 56: 0, # 'Я' + 1: 3, # 'а' + 18: 1, # 'б' + 9: 0, # 'в' + 20: 2, # 'г' + 11: 3, # 'д' + 3: 3, # 'е' + 23: 1, # 'ж' + 15: 3, # 'з' + 2: 3, # 'и' + 26: 0, # 'й' + 12: 3, # 'к' + 10: 3, # 'л' + 14: 2, # 'м' + 6: 3, # 'н' + 4: 3, # 'о' + 13: 2, # 'п' + 7: 3, # 'Ñ€' + 8: 3, # 'Ñ' + 5: 3, # 'Ñ‚' + 19: 2, # 'у' + 29: 0, # 'Ñ„' + 25: 
2,  # 'х'
+        22: 2,  # 'ц'
+        21: 3,  # 'ч'
+        27: 2,  # 'ш'
+        24: 1,  # 'щ'
+        17: 3,  # 'ъ'
+        52: 1,  # 'ь'
+        42: 2,  # 'ю'
+        16: 3,  # 'я'
+        58: 0,  # 'є'
+        62: 0,  # '№'
+    },
+    # … rows for 'г', 'д', 'е', 'ж', 'з', 'и', 'й', 'к', 'л', 'м', 'н', 'о', 'п',
+    # 'р', 'с', 'т', 'у', 'ф', 'х', 'ц', 'ч', 'ш', 'щ', 'ъ', 'ь', 'ю', 'я', 'є'
+    # and '№' follow: one 64-entry map of 0-3 bigram likelihoods each …
+}
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+ISO_8859_5_BULGARIAN_CHAR_TO_ORDER = {
+    # … 256-entry byte-to-order map: control bytes -> 255/254, punctuation -> 253,
+    # digits -> 252, letters -> frequency ranks ('а': 1, 'и': 2, 'е': 3, 'о': 4, …) …
+}
+
+ISO_8859_5_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5',
+                                                    language='Bulgarian',
+                                                    char_to_order_map=ISO_8859_5_BULGARIAN_CHAR_TO_ORDER,
+                                                    language_model=BULGARIAN_LANG_MODEL,
+                                                    typical_positive_ratio=0.969392,
+                                                    keep_ascii_letters=False,
+                                                    alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя')
+
+WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER = {
+    # … 256-entry byte-to-order map for windows-1251, same layout as above …
+}
+
+WINDOWS_1251_BULGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251',
+                                                      language='Bulgarian',
+                                                      char_to_order_map=WINDOWS_1251_BULGARIAN_CHAR_TO_ORDER,
+                                                      language_model=BULGARIAN_LANG_MODEL,
+                                                      typical_positive_ratio=0.969392,
+                                                      keep_ascii_letters=False,
+                                                      alphabet='АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯабвгдежзийклмнопрстуфхцчшщъьюя')
+
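The hunks above and below add pip's vendored copies of chardet's machine-generated single-byte charset models; they appear in this diff only because the `myenv/` virtual environment tree was committed alongside the migration, not because any project code touches them. For context only, a minimal sketch of how models like these get exercised, assuming the standalone `chardet` package is installed (the `pip._vendor` copy is internal to pip and not meant to be imported directly):

# Context-only sketch, not part of this PR: detect the encoding of
# windows-1251-encoded Bulgarian text with the standalone `chardet` package.
import chardet

# The detector scores the bytes against single-byte models such as
# WINDOWS_1251_BULGARIAN_MODEL defined in the file above.
sample = "Това е кратък текст на български език.".encode("windows-1251")
print(chardet.detect(sample))
# e.g. {'encoding': 'windows-1251', 'confidence': 0.8, 'language': 'Bulgarian'}

The exact confidence (and, for very short inputs, even the reported encoding) varies with the sample text, since these tables encode bigram likelihoods learned from training corpora.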
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langgreekmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langgreekmodel.py
new file mode 100644
index 000000000..d99528ede
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langgreekmodel.py
@@ -0,0 +1,4398 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+GREEK_LANG_MODEL = {
+    # … rows for 'e', 'o', 't', '·', 'Ά', 'Έ', 'Ό', 'Α', 'Β', 'Γ', 'Δ', 'Ε', 'Η',
+    # 'Θ', 'Ι', 'Κ', 'Λ', 'Μ', 'Ν', 'Ξ', 'Ο', 'Π', 'Ρ', 'Σ', 'Τ', 'Υ', 'Φ', 'Χ'
+    # and 'Ω': one 64-entry map of 0-3 next-character likelihoods each …
+    17: {  # 'ά'
+        # … entries for 'e' through 'η' follow the same pattern …
+        25: 3,  # 'θ'
+        5: 
2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 18: { # 'έ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 3, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 22: { # 'ή' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 1, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 15: { # 'ί' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 3, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 1, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 1: { # 'α' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 
17: 0, # 'ά' + 18: 2, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 3, # 'ζ' + 13: 1, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 29: { # 'β' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 2, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 3, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 20: { # 'γ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 21: { # 'δ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 3: { # 'ε' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 
38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 32: { # 'ζ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 2, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 1, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 13: { # 'η' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 25: { # 'θ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 5: { # 'ι' + 60: 0, # 'e' + 55: 1, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 
46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 0, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 11: { # 'κ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 16: { # 'λ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 1, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 3, # 'λ' + 10: 2, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 10: { # 'μ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 1, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 2, # 'Ï…' + 28: 3, # 'φ' + 23: 
0, # 'χ' + 42: 2, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 6: { # 'ν' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 1, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 30: { # 'ξ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 2, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 2, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 1, # 'ÏŽ' + }, + 4: { # 'ο' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 2, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 1, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 9: { # 'Ï€' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 
16: 3, # 'λ' + 10: 0, # 'μ' + 6: 2, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 2, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 8: { # 'Ï' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 1, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 2, # 'Ï€' + 8: 2, # 'Ï' + 14: 0, # 'Ï‚' + 7: 2, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 14: { # 'Ï‚' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 2, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 0, # 'θ' + 5: 0, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 0, # 'Ï„' + 12: 0, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 7: { # 'σ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 3, # 'β' + 20: 0, # 'γ' + 21: 2, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 3, # 'θ' + 5: 3, # 'ι' + 11: 3, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 3, # 'φ' + 23: 3, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 2: { # 'Ï„' + 60: 0, # 'e' + 55: 2, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 
'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 2, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 3, # 'ι' + 11: 2, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 2, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 12: { # 'Ï…' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 2, # 'ί' + 1: 3, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 2, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 3, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 2, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 28: { # 'φ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 3, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 0, # 'μ' + 6: 1, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 1, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 2, # 'Ï' + 27: 2, # 'ÏŽ' + }, + 23: { # 'χ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 3, # 'ά' + 18: 2, # 'έ' + 22: 3, # 'ή' + 15: 3, # 'ί' + 1: 3, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 3, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 2, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 3, # 'ο' + 9: 0, # 'Ï€' + 8: 3, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 3, # 'Ï„' + 12: 3, # 'Ï…' + 28: 0, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 3, # 'ω' + 19: 3, # 'ÏŒ' + 26: 3, # 'Ï' + 27: 3, # 'ÏŽ' + }, + 42: { # 'ψ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, 
# 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 2, # 'ά' + 18: 2, # 'έ' + 22: 1, # 'ή' + 15: 2, # 'ί' + 1: 2, # 'α' + 29: 0, # 'β' + 20: 0, # 'γ' + 21: 0, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 3, # 'η' + 25: 0, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 0, # 'λ' + 10: 0, # 'μ' + 6: 0, # 'ν' + 30: 0, # 'ξ' + 4: 2, # 'ο' + 9: 0, # 'Ï€' + 8: 0, # 'Ï' + 14: 0, # 'Ï‚' + 7: 0, # 'σ' + 2: 2, # 'Ï„' + 12: 1, # 'Ï…' + 28: 0, # 'φ' + 23: 0, # 'χ' + 42: 0, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 24: { # 'ω' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 1, # 'ά' + 18: 0, # 'έ' + 22: 2, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 2, # 'β' + 20: 3, # 'γ' + 21: 2, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 0, # 'η' + 25: 3, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 0, # 'ξ' + 4: 0, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 19: { # 'ÏŒ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 3, # 'β' + 20: 3, # 'γ' + 21: 3, # 'δ' + 3: 1, # 'ε' + 32: 2, # 'ζ' + 13: 2, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 2, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 3, # 'χ' + 42: 2, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 26: { # 'Ï' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 2, # 'α' + 29: 2, # 'β' + 20: 2, # 'γ' + 21: 1, # 'δ' + 3: 3, # 'ε' + 32: 0, # 'ζ' + 13: 2, # 'η' + 25: 3, # 'θ' + 5: 0, # 'ι' + 11: 3, # 'κ' + 16: 3, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 2, # 'ξ' + 4: 3, # 'ο' + 9: 3, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 2, # 'φ' + 23: 2, # 'χ' + 42: 2, # 'ψ' + 24: 2, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, + 27: { # 'ÏŽ' + 60: 0, # 'e' + 55: 0, # 'o' + 58: 0, # 't' + 36: 0, # '·' + 61: 0, # 'Ά' + 46: 0, # 'Έ' + 54: 
0, # 'ÎŒ' + 31: 0, # 'Α' + 51: 0, # 'Î’' + 43: 0, # 'Γ' + 41: 0, # 'Δ' + 34: 0, # 'Ε' + 40: 0, # 'Η' + 52: 0, # 'Θ' + 47: 0, # 'Ι' + 44: 0, # 'Κ' + 53: 0, # 'Λ' + 38: 0, # 'Μ' + 49: 0, # 'Î' + 59: 0, # 'Ξ' + 39: 0, # 'Ο' + 35: 0, # 'Π' + 48: 0, # 'Ρ' + 37: 0, # 'Σ' + 33: 0, # 'Τ' + 45: 0, # 'Î¥' + 56: 0, # 'Φ' + 50: 0, # 'Χ' + 57: 0, # 'Ω' + 17: 0, # 'ά' + 18: 0, # 'έ' + 22: 0, # 'ή' + 15: 0, # 'ί' + 1: 0, # 'α' + 29: 1, # 'β' + 20: 0, # 'γ' + 21: 3, # 'δ' + 3: 0, # 'ε' + 32: 0, # 'ζ' + 13: 1, # 'η' + 25: 2, # 'θ' + 5: 2, # 'ι' + 11: 0, # 'κ' + 16: 2, # 'λ' + 10: 3, # 'μ' + 6: 3, # 'ν' + 30: 1, # 'ξ' + 4: 0, # 'ο' + 9: 2, # 'Ï€' + 8: 3, # 'Ï' + 14: 3, # 'Ï‚' + 7: 3, # 'σ' + 2: 3, # 'Ï„' + 12: 0, # 'Ï…' + 28: 1, # 'φ' + 23: 1, # 'χ' + 42: 0, # 'ψ' + 24: 0, # 'ω' + 19: 0, # 'ÏŒ' + 26: 0, # 'Ï' + 27: 0, # 'ÏŽ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1253_GREEK_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+# Character Mapping Table(s):
+WINDOWS_1253_GREEK_CHAR_TO_ORDER = {
+    # Control characters map to 255/254, ASCII punctuation to 253 and digits
+    # to 252, per the legend above; ASCII letters get their own orders, e.g.:
+    65: 82,   # 'A'
+    97: 72,   # 'a'
+    101: 60,  # 'e'
+    111: 55,  # 'o'
+    116: 58,  # 't'
+    # The windows-1253 Greek range maps each letter to its frequency order:
+    193: 31,  # 'Α'
+    211: 37,  # 'Σ'
+    225: 1,   # 'α'
+    243: 7,   # 'σ'
+    244: 2,   # 'τ'
+    # ... (all 256 byte values are present in the full table)
+}
+
+WINDOWS_1253_GREEK_MODEL = SingleByteCharSetModel(charset_name='windows-1253',
+                                                  language='Greek',
+                                                  char_to_order_map=WINDOWS_1253_GREEK_CHAR_TO_ORDER,
+                                                  language_model=GREEK_LANG_MODEL,
+                                                  typical_positive_ratio=0.982851,
+                                                  keep_ascii_letters=False,
+                                                  alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ')
+
+ISO_8859_7_GREEK_CHAR_TO_ORDER = {
+    # Identical to the windows-1253 table in the ASCII range; the 0xA0-0xBF
+    # block differs slightly (ISO-8859-7 places '‘', '’', '€' and '₯' there),
+    # and the Greek letters at 0xC0-0xFF map to the same orders:
+    193: 31,  # 'Α'
+    225: 1,   # 'α'
+    # ... (all 256 byte values are present in the full table)
+}
+
+ISO_8859_7_GREEK_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-7',
+                                                language='Greek',
+                                                char_to_order_map=ISO_8859_7_GREEK_CHAR_TO_ORDER,
+                                                language_model=GREEK_LANG_MODEL,
+                                                typical_positive_ratio=0.982851,
+                                                keep_ascii_letters=False,
+                                                alphabet='ΆΈΉΊΌΎΏΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩάέήίαβγδεζηθικλμνξοπρςστυφχψωόύώ')
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhebrewmodel.py
new file mode 100644
index 000000000..484c652a4
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhebrewmodel.py
@@ -0,0 +1,4383 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+HEBREW_LANG_MODEL = {
+    50: {  # 'a'
+        # Latin letters occur in Hebrew text (loanwords, abbreviations), so
+        # the model scores them too; a few representative entries:
+        56: 2,  # 'l'
+        54: 2,  # 'n'
+        51: 2,  # 'r'
+        44: 2,  # 't'
+        3: 1,   # 'ה'
+        6: 1,   # 'מ'
+        17: 1,  # 'ק'
+        10: 1,  # 'ש'
+        # ... (the remaining orders in this row are rated 0 or 1)
+    },
+    # ... (rows elided for the orders of 'c', 'd', 'e', 'i', 'l', 'n', 'o',
+    # 'r', 's', 't', 'u', '\xa0', '´', '¼', '½', '¾', 'ְ', 'ֱ'; each maps
+    # every character order in the table to a 0-3 likelihood as above)
+    41: {  # 'ֲ'
+        # (zero-valued entries elided)
+        9: 0,  # 'א'
+        8: 2,  # 'ב'
+        20: 1,  # 'ג'
+        16: 2,  # 'ד'
+        3: 1,  # 'ה'
+        2: 1,  # 'ו'
+        24: 1,
# '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 0, # '×' + 6: 2, # 'מ' + 23: 0, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 33: { # 'Ö´' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 2, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 37: { # 'Öµ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 1, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 36: { # 'Ö¶' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 1, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 2, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 31: { # 'Ö·' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 
0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 2, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 29: { # 'Ö¸' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 2, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 35: { # 'Ö¹' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 2, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 2, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 62: { # 'Ö»' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 2, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 1, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # 
'×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 28: { # 'Ö¼' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 3, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 3, # 'Ö·' + 29: 3, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 2, # '×' + 45: 1, # 'ׂ' + 9: 2, # '×' + 8: 2, # 'ב' + 20: 1, # '×’' + 16: 2, # 'ד' + 3: 1, # '×”' + 2: 2, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 2, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 2, # 'ל' + 11: 1, # '×' + 6: 2, # 'מ' + 23: 1, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 2, # 'ר' + 10: 2, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 38: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 2, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 45: { # 'ׂ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 2, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 9: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 2, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 
28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 8: { # 'ב' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 1, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 20: { # '×’' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 16: { # 'ד' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 3: { # '×”' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 
0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 3, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 0, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 2: { # 'ו' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 3, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 24: { # '×–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 1, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 2, # 'ב' + 20: 2, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 2, # '×—' + 22: 1, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 2, # '× ' + 19: 1, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 2, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 14: { # '×—' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 2, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 
11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 1, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 22: { # 'ט' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 1, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 2, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 1: { # '×™' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 25: { # 'ך' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 1, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 15: { # '×›' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' 
+ 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 3, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 4: { # 'ל' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 3, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 11: { # '×' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 1, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 6: { # 'מ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 0, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 
47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 23: { # 'ן' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 1, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 1, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 12: { # '× ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 19: { # 'ס' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 2, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 3, # '×£' + 18: 3, # 'פ' + 27: 0, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 1, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 13: { # '×¢' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 1, # 'Ö±' + 41: 2, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, 
# '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 1, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 2, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 2, # '×¢' + 26: 1, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 26: { # '×£' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 1, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 18: { # 'פ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 1, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 2, # 'ב' + 20: 3, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 2, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 2, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 27: { # '×¥' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 21: { # 'צ' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, 
# 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 1, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 1, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 0, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 17: { # '×§' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 1, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 2, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 1, # 'ך' + 15: 1, # '×›' + 4: 3, # 'ל' + 11: 2, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 2, # '×¥' + 21: 3, # 'צ' + 17: 2, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 7: { # 'ר' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 2, # '´' + 48: 1, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 1, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 2, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 3, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 3, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 3, # '×¥' + 21: 3, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 10: { # 'ש' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 1, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 1, # 'Ö´' + 37: 1, # 'Öµ' + 36: 1, # 'Ö¶' + 31: 1, # 'Ö·' + 29: 1, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 3, # '×' + 45: 2, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 3, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 3, # 'ט' + 1: 3, # '×™' + 25: 3, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 2, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 
18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 1, # '…' + }, + 5: { # 'ת' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 1, # '\xa0' + 55: 0, # '´' + 48: 1, # '¼' + 39: 1, # '½' + 57: 0, # '¾' + 30: 2, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 2, # 'Ö´' + 37: 2, # 'Öµ' + 36: 2, # 'Ö¶' + 31: 2, # 'Ö·' + 29: 2, # 'Ö¸' + 35: 1, # 'Ö¹' + 62: 1, # 'Ö»' + 28: 2, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 3, # '×' + 8: 3, # 'ב' + 20: 3, # '×’' + 16: 2, # 'ד' + 3: 3, # '×”' + 2: 3, # 'ו' + 24: 2, # '×–' + 14: 3, # '×—' + 22: 2, # 'ט' + 1: 3, # '×™' + 25: 2, # 'ך' + 15: 3, # '×›' + 4: 3, # 'ל' + 11: 3, # '×' + 6: 3, # 'מ' + 23: 3, # 'ן' + 12: 3, # '× ' + 19: 2, # 'ס' + 13: 3, # '×¢' + 26: 2, # '×£' + 18: 3, # 'פ' + 27: 1, # '×¥' + 21: 2, # 'צ' + 17: 3, # '×§' + 7: 3, # 'ר' + 10: 3, # 'ש' + 5: 3, # 'ת' + 32: 1, # '–' + 52: 1, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, + 32: { # '–' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 1, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 52: { # '’' + 50: 1, # 'a' + 60: 0, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 2, # 's' + 44: 2, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 1, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 47: { # '“' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 1, # 'l' + 54: 1, # 'n' + 49: 1, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 1, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 
'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 2, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 1, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 1, # '×—' + 22: 1, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 1, # 'ס' + 13: 1, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 1, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 46: { # 'â€' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 1, # 'ב' + 20: 1, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 1, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 0, # '†' + 40: 0, # '…' + }, + 58: { # '†' + 50: 0, # 'a' + 60: 0, # 'c' + 61: 0, # 'd' + 42: 0, # 'e' + 53: 0, # 'i' + 56: 0, # 'l' + 54: 0, # 'n' + 49: 0, # 'o' + 51: 0, # 'r' + 43: 0, # 's' + 44: 0, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 0, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 0, # '×”' + 2: 0, # 'ו' + 24: 0, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 0, # '×™' + 25: 0, # 'ך' + 15: 0, # '×›' + 4: 0, # 'ל' + 11: 0, # '×' + 6: 0, # 'מ' + 23: 0, # 'ן' + 12: 0, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 0, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 0, # 'ר' + 10: 0, # 'ש' + 5: 0, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 0, # 'â€' + 58: 2, # '†' + 40: 0, # '…' + }, + 40: { # '…' + 50: 1, # 'a' + 60: 1, # 'c' + 61: 1, # 'd' + 42: 1, # 'e' + 53: 1, # 'i' + 56: 0, # 'l' + 54: 1, # 'n' + 49: 0, # 'o' + 51: 1, # 'r' + 43: 1, # 's' + 44: 1, # 't' + 63: 0, # 'u' + 34: 0, # '\xa0' + 55: 0, # '´' + 48: 0, # '¼' + 39: 0, # '½' + 57: 0, # '¾' + 30: 0, # 'Ö°' + 59: 0, # 'Ö±' + 41: 0, # 'Ö²' + 33: 0, # 'Ö´' + 37: 0, # 'Öµ' + 36: 0, # 'Ö¶' + 31: 0, # 'Ö·' + 29: 0, # 'Ö¸' + 35: 0, # 'Ö¹' + 62: 0, # 'Ö»' + 28: 0, # 'Ö¼' + 38: 0, # '×' + 45: 0, # 'ׂ' + 9: 1, # '×' + 8: 0, # 'ב' + 20: 0, # '×’' + 16: 0, # 'ד' + 3: 1, # '×”' + 2: 1, # 'ו' + 24: 1, # '×–' + 14: 0, # '×—' + 22: 0, # 'ט' + 1: 1, # '×™' + 25: 0, # 'ך' + 15: 1, # '×›' + 4: 1, # 'ל' + 11: 0, # '×' + 6: 1, # 'מ' + 23: 0, # 'ן' + 12: 1, # '× ' + 19: 0, # 'ס' + 13: 0, # '×¢' + 26: 0, # '×£' + 18: 1, # 'פ' + 27: 0, # '×¥' + 21: 0, # 'צ' + 17: 0, # '×§' + 7: 1, # 'ר' + 10: 1, # 'ש' + 5: 1, # 'ת' + 32: 0, # '–' + 52: 0, # '’' + 47: 0, # '“' + 46: 1, # 'â€' + 58: 0, # '†' + 40: 2, # '…' + }, +} + +# 255: Undefined characters that 
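The language-model table above is a plain nested dict of bigram ratings keyed by frequency-order index, on the 0-3 scale given in the file's header comments (0 = never seen together, 3 = frequent pair). A minimal sketch of how such a table can score a run of already-mapped order indices follows; the function name and the "rating >= 2 counts as a hit" threshold are illustrative assumptions, not chardet's actual prober logic:

```python
# Minimal sketch: score adjacent order-index pairs against a bigram rating
# table shaped like HEBREW_LANG_MODEL. The >= 2 ("likely" or better)
# threshold and the helper name are illustrative, not chardet internals.
def positive_ratio(orders, lang_model):
    """Fraction of adjacent order pairs rated 'likely' (2) or better."""
    pairs = list(zip(orders, orders[1:]))
    if not pairs:
        return 0.0
    hits = sum(1 for prev, cur in pairs
               if lang_model.get(prev, {}).get(cur, 0) >= 2)
    return hits / len(pairs)

# Example: orders 10, 4, 2, 11 are the letters of the word shalom with a
# final mem; each adjacent pair is rated 3 in the table, so this returns 1.0.
# positive_ratio([10, 4, 2, 11], HEBREW_LANG_MODEL)
```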
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+WINDOWS_1255_HEBREW_CHAR_TO_ORDER = {
+    0: 255,  # '\x00'
+    1: 255,  # '\x01'
+    # ... (256 entries total, one per Windows-1255 byte value: control bytes
+    # map to 251/254/255, punctuation to 253, digits to 252, and ASCII
+    # letters plus the Hebrew range 0xE0-0xFA to the frequency-order indices
+    # used by HEBREW_LANG_MODEL above; the Hebrew-letter comments in this
+    # hunk are mojibake-garbled) ...
+    250: 5,  # 'ת'
+    251: 251,  # None
+    252: 252,  # None
+    253: 128,  # '\u200e'
+    254: 96,  # '\u200f'
+    255: 253,  # None
+}
+
+WINDOWS_1255_HEBREW_MODEL = SingleByteCharSetModel(charset_name='windows-1255',
+                                                   language='Hebrew',
+                                                   char_to_order_map=WINDOWS_1255_HEBREW_CHAR_TO_ORDER,
+                                                   language_model=HEBREW_LANG_MODEL,
+                                                   typical_positive_ratio=0.984004,
+                                                   keep_ascii_letters=False,
+                                                   alphabet='אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ')
+
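For reference, a SingleByteCharSetModel bundle like WINDOWS_1255_HEBREW_MODEL is what chardet's single-byte prober consumes: it maps incoming bytes through char_to_order_map and tallies language_model ratings to produce a confidence. A hedged usage sketch, assuming this vendored copy is importable (pip does not document pip._vendor as a public entry point):

```python
# Hedged usage sketch: build a prober from the vendored Hebrew model and
# feed it windows-1255 bytes. SingleByteCharSetProber, feed(), charset_name,
# and get_confidence() are chardet's API; importing via pip._vendor is an
# assumption of this sketch, not a supported interface.
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
from pip._vendor.chardet.langhebrewmodel import WINDOWS_1255_HEBREW_MODEL

prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL)
prober.feed(b"\xf9\xec\xe5\xed \xf2\xe5\xec\xed")  # "shalom olam" in windows-1255
print(prober.charset_name, prober.get_confidence())
```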
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhungarianmodel.py
new file mode 100644
index 000000000..bbc5cda64
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhungarianmodel.py
@@ -0,0 +1,4650 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+HUNGARIAN_LANG_MODEL = {
+    # ... (bigram-rating rows, one nested dict per character order: the
+    # uppercase and lowercase Latin letters plus the accented Hungarian
+    # vowels Á/É/Í/Ó/Ö/Ú/Ü and á/é/í/ó/ö/ő/ú/ü/ű; the accented-letter
+    # comments in this hunk are mojibake-garbled, and the table continues
+    # below) ...
'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 1, # 'ű' + }, + 35: { # 'N' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 2, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 2, # 'Y' + 52: 1, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 47: { # 'O' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 2, # 'K' + 41: 2, # 'L' + 34: 2, # 'M' + 35: 2, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 1, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 1, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 46: { # 'P' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 0, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 0, # 'ű' + }, + 43: { # 'R' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 
26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 2, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 2, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 33: { # 'S' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 3, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 37: { # 'T' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 1, # 'S' + 37: 2, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 2, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 1, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 2, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 2, # 'Ã' + 44: 2, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 57: { # 'U' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 48: { # 'V' + 28: 2, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 0, # 'G' + 
38: 0, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 2, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 2, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 2, # 'o' + 23: 0, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 2, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 0, # 'Ú' + 63: 1, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 55: { # 'Y' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 1, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 2, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 0, # 'r' + 5: 0, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 1, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 52: { # 'Z' + 28: 2, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 2, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 2, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 2, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 1, # 'U' + 48: 1, # 'V' + 55: 1, # 'Y' + 52: 1, # 'Z' + 2: 1, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 0, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 2, # 'Ã' + 44: 1, # 'É' + 61: 1, # 'Ã' + 58: 1, # 'Ó' + 59: 1, # 'Ö' + 60: 1, # 'Ú' + 63: 1, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 2: { # 'a' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, 
# 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 18: { # 'b' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 26: { # 'c' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 1, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 2, # 'é' + 30: 2, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 17: { # 'd' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 1, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 1: { # 'e' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 
3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 2, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 27: { # 'f' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 3, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 2, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 12: { # 'g' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 20: { # 'h' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 3, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 1, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 9: { # 'i' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 
33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 2, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 1, # 'ö' + 31: 2, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 1, # 'ű' + }, + 22: { # 'j' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 1, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 7: { # 'k' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 2, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 1, # 'ú' + 29: 3, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 6: { # 'l' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 1, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 3, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 3, # 'Å‘' + 56: 1, # 'ű' + }, + 
13: { # 'm' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 2, # 'ű' + }, + 4: { # 'n' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 1, # 'x' + 16: 3, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 8: { # 'o' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 1, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 2, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 23: { # 'p' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 3, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 
'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 10: { # 'r' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 2, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 2, # 'ű' + }, + 5: { # 's' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 2, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 3: { # 't' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 1, # 'g' + 20: 3, # 'h' + 9: 3, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 3, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 3, # 'ú' + 29: 3, # 'ü' + 42: 3, # 'Å‘' + 56: 2, # 'ű' + }, + 21: { # 'u' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 
2, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 3, # 'v' + 62: 1, # 'x' + 16: 1, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 2, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 1, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 19: { # 'v' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 2, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 2, # 'ö' + 31: 1, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 1, # 'ű' + }, + 62: { # 'x' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 0, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 1, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 1, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 1, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 16: { # 'y' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 3, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 2, # 'j' + 7: 2, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 2, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 2, # 'í' + 25: 2, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 2, # 'ü' + 42: 1, # 'Å‘' + 56: 2, # 'ű' + }, + 11: { # 'z' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 
53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 3, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 3, # 'd' + 1: 3, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 3, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 2, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 3, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 3, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 3, # 'á' + 15: 3, # 'é' + 30: 3, # 'í' + 25: 3, # 'ó' + 24: 3, # 'ö' + 31: 2, # 'ú' + 29: 3, # 'ü' + 42: 2, # 'Å‘' + 56: 1, # 'ű' + }, + 51: { # 'Ã' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 1, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 44: { # 'É' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 1, # 'E' + 50: 0, # 'F' + 49: 2, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 2, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 2, # 'R' + 33: 2, # 'S' + 37: 2, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 3, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 61: { # 'Ã' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 0, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 1, # 'm' + 4: 0, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 0, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 
0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 58: { # 'Ó' + 28: 1, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 1, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 2, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 2, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 0, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 1, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 59: { # 'Ö' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 1, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 1, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 0, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 2, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 60: { # 'Ú' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 1, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 1, # 'F' + 49: 1, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 0, # 'b' + 26: 0, # 'c' + 17: 0, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 2, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 2, # 'j' + 7: 0, # 'k' + 6: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 0, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 0, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 63: { # 'Ü' + 28: 0, # 'A' + 40: 1, # 'B' + 54: 0, # 'C' + 45: 1, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 1, # 'G' + 38: 1, # 'H' + 39: 0, # 'I' + 53: 1, # 'J' + 36: 1, # 'K' + 41: 1, # 'L' + 34: 1, # 'M' + 35: 1, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 1, # 'R' + 33: 1, # 'S' + 37: 1, # 'T' + 57: 0, # 'U' + 48: 1, # 'V' + 55: 0, # 'Y' + 52: 1, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 0, # 'f' + 12: 1, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 0, # 'j' + 7: 0, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 
8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 1, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 14: { # 'á' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 3, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 3, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 2, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 1, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 2, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 15: { # 'é' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 3, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 3, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 30: { # 'í' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 0, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 2, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 2, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 25: { # 'ó' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 
'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 2, # 'a' + 18: 3, # 'b' + 26: 2, # 'c' + 17: 3, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 2, # 'g' + 20: 2, # 'h' + 9: 2, # 'i' + 22: 2, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 8: 1, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 1, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 1, # 'ö' + 31: 1, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 24: { # 'ö' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 0, # 'a' + 18: 3, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 0, # 'e' + 27: 1, # 'f' + 12: 2, # 'g' + 20: 1, # 'h' + 9: 0, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 2, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 3, # 't' + 21: 0, # 'u' + 19: 3, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 3, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 31: { # 'ú' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 2, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 2, # 'f' + 12: 3, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 3, # 'j' + 7: 1, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 3, # 'r' + 5: 3, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 1, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 29: { # 'ü' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 3, # 'g' + 20: 2, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 3, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 8: 0, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 0, # 'u' + 19: 2, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 42: { # 'Å‘' + 28: 0, # 
'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 2, # 'b' + 26: 1, # 'c' + 17: 2, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 2, # 'k' + 6: 3, # 'l' + 13: 1, # 'm' + 4: 2, # 'n' + 8: 1, # 'o' + 23: 1, # 'p' + 10: 2, # 'r' + 5: 2, # 's' + 3: 2, # 't' + 21: 1, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 1, # 'é' + 30: 1, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 1, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, + 56: { # 'ű' + 28: 0, # 'A' + 40: 0, # 'B' + 54: 0, # 'C' + 45: 0, # 'D' + 32: 0, # 'E' + 50: 0, # 'F' + 49: 0, # 'G' + 38: 0, # 'H' + 39: 0, # 'I' + 53: 0, # 'J' + 36: 0, # 'K' + 41: 0, # 'L' + 34: 0, # 'M' + 35: 0, # 'N' + 47: 0, # 'O' + 46: 0, # 'P' + 43: 0, # 'R' + 33: 0, # 'S' + 37: 0, # 'T' + 57: 0, # 'U' + 48: 0, # 'V' + 55: 0, # 'Y' + 52: 0, # 'Z' + 2: 1, # 'a' + 18: 1, # 'b' + 26: 0, # 'c' + 17: 1, # 'd' + 1: 1, # 'e' + 27: 1, # 'f' + 12: 1, # 'g' + 20: 1, # 'h' + 9: 1, # 'i' + 22: 1, # 'j' + 7: 1, # 'k' + 6: 1, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 8: 0, # 'o' + 23: 0, # 'p' + 10: 1, # 'r' + 5: 1, # 's' + 3: 1, # 't' + 21: 0, # 'u' + 19: 1, # 'v' + 62: 0, # 'x' + 16: 0, # 'y' + 11: 2, # 'z' + 51: 0, # 'Ã' + 44: 0, # 'É' + 61: 0, # 'Ã' + 58: 0, # 'Ó' + 59: 0, # 'Ö' + 60: 0, # 'Ú' + 63: 0, # 'Ü' + 14: 0, # 'á' + 15: 0, # 'é' + 30: 0, # 'í' + 25: 0, # 'ó' + 24: 0, # 'ö' + 31: 0, # 'ú' + 29: 0, # 'ü' + 42: 0, # 'Å‘' + 56: 0, # 'ű' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
+ 64: 253, # '@' + 65: 28, # 'A' + 66: 40, # 'B' + 67: 54, # 'C' + 68: 45, # 'D' + 69: 32, # 'E' + 70: 50, # 'F' + 71: 49, # 'G' + 72: 38, # 'H' + 73: 39, # 'I' + 74: 53, # 'J' + 75: 36, # 'K' + 76: 41, # 'L' + 77: 34, # 'M' + 78: 35, # 'N' + 79: 47, # 'O' + 80: 46, # 'P' + 81: 72, # 'Q' + 82: 43, # 'R' + 83: 33, # 'S' + 84: 37, # 'T' + 85: 57, # 'U' + 86: 48, # 'V' + 87: 64, # 'W' + 88: 68, # 'X' + 89: 55, # 'Y' + 90: 52, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 2, # 'a' + 98: 18, # 'b' + 99: 26, # 'c' + 100: 17, # 'd' + 101: 1, # 'e' + 102: 27, # 'f' + 103: 12, # 'g' + 104: 20, # 'h' + 105: 9, # 'i' + 106: 22, # 'j' + 107: 7, # 'k' + 108: 6, # 'l' + 109: 13, # 'm' + 110: 4, # 'n' + 111: 8, # 'o' + 112: 23, # 'p' + 113: 67, # 'q' + 114: 10, # 'r' + 115: 5, # 's' + 116: 3, # 't' + 117: 21, # 'u' + 118: 19, # 'v' + 119: 65, # 'w' + 120: 62, # 'x' + 121: 16, # 'y' + 122: 11, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 161, # '€' + 129: 162, # None + 130: 163, # '‚' + 131: 164, # None + 132: 165, # '„' + 133: 166, # '…' + 134: 167, # '†' + 135: 168, # '‡' + 136: 169, # None + 137: 170, # '‰' + 138: 171, # 'Å ' + 139: 172, # '‹' + 140: 173, # 'Åš' + 141: 174, # 'Ť' + 142: 175, # 'Ž' + 143: 176, # 'Ź' + 144: 177, # None + 145: 178, # '‘' + 146: 179, # '’' + 147: 180, # '“' + 148: 78, # 'â€' + 149: 181, # '•' + 150: 69, # '–' + 151: 182, # '—' + 152: 183, # None + 153: 184, # 'â„¢' + 154: 185, # 'Å¡' + 155: 186, # '›' + 156: 187, # 'Å›' + 157: 188, # 'Å¥' + 158: 189, # 'ž' + 159: 190, # 'ź' + 160: 191, # '\xa0' + 161: 192, # 'ˇ' + 162: 193, # '˘' + 163: 194, # 'Å' + 164: 195, # '¤' + 165: 196, # 'Ä„' + 166: 197, # '¦' + 167: 76, # '§' + 168: 198, # '¨' + 169: 199, # '©' + 170: 200, # 'Åž' + 171: 201, # '«' + 172: 202, # '¬' + 173: 203, # '\xad' + 174: 204, # '®' + 175: 205, # 'Å»' + 176: 81, # '°' + 177: 206, # '±' + 178: 207, # 'Ë›' + 179: 208, # 'Å‚' + 180: 209, # '´' + 181: 210, # 'µ' + 182: 211, # '¶' + 183: 212, # '·' + 184: 213, # '¸' + 185: 214, # 'Ä…' + 186: 215, # 'ÅŸ' + 187: 216, # '»' + 188: 217, # 'Ľ' + 189: 218, # 'Ë' + 190: 219, # 'ľ' + 191: 220, # 'ż' + 192: 221, # 'Å”' + 193: 51, # 'Ã' + 194: 83, # 'Â' + 195: 222, # 'Ä‚' + 196: 80, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'ÄŒ' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Äš' + 205: 61, # 'Ã' + 206: 230, # 'ÃŽ' + 207: 231, # 'ÄŽ' + 208: 232, # 'Ä' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Å' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Å®' + 218: 60, # 'Ú' + 219: 70, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ã' + 222: 240, # 'Å¢' + 223: 241, # 'ß' + 224: 84, # 'Å•' + 225: 14, # 'á' + 226: 75, # 'â' + 227: 242, # 'ă' + 228: 71, # 'ä' + 229: 82, # 'ĺ' + 230: 243, # 'ć' + 231: 73, # 'ç' + 232: 244, # 'Ä' + 233: 15, # 'é' + 234: 85, # 'Ä™' + 235: 79, # 'ë' + 236: 86, # 'Ä›' + 237: 30, # 'í' + 238: 77, # 'î' + 239: 87, # 'Ä' + 240: 245, # 'Ä‘' + 241: 246, # 'Å„' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 74, # 'ô' + 245: 42, # 'Å‘' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'Å™' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'Å£' + 255: 253, # 'Ë™' +} + +WINDOWS_1250_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1250', + language='Hungarian', + char_to_order_map=WINDOWS_1250_HUNGARIAN_CHAR_TO_ORDER, + 
language_model=HUNGARIAN_LANG_MODEL,
+ typical_positive_ratio=0.947368,
+ keep_ascii_letters=True,
+ alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű')
+
+ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER = {
+ 0: 255, # '\x00'
+ 1: 255, # '\x01'
+ 2: 255, # '\x02'
+ 3: 255, # '\x03'
+ 4: 255, # '\x04'
+ 5: 255, # '\x05'
+ 6: 255, # '\x06'
+ 7: 255, # '\x07'
+ 8: 255, # '\x08'
+ 9: 255, # '\t'
+ 10: 254, # '\n'
+ 11: 255, # '\x0b'
+ 12: 255, # '\x0c'
+ 13: 254, # '\r'
+ 14: 255, # '\x0e'
+ 15: 255, # '\x0f'
+ 16: 255, # '\x10'
+ 17: 255, # '\x11'
+ 18: 255, # '\x12'
+ 19: 255, # '\x13'
+ 20: 255, # '\x14'
+ 21: 255, # '\x15'
+ 22: 255, # '\x16'
+ 23: 255, # '\x17'
+ 24: 255, # '\x18'
+ 25: 255, # '\x19'
+ 26: 255, # '\x1a'
+ 27: 255, # '\x1b'
+ 28: 255, # '\x1c'
+ 29: 255, # '\x1d'
+ 30: 255, # '\x1e'
+ 31: 255, # '\x1f'
+ 32: 253, # ' '
+ 33: 253, # '!'
+ 34: 253, # '"'
+ 35: 253, # '#'
+ 36: 253, # '$'
+ 37: 253, # '%'
+ 38: 253, # '&'
+ 39: 253, # "'"
+ 40: 253, # '('
+ 41: 253, # ')'
+ 42: 253, # '*'
+ 43: 253, # '+'
+ 44: 253, # ','
+ 45: 253, # '-'
+ 46: 253, # '.'
+ 47: 253, # '/'
+ 48: 252, # '0'
+ 49: 252, # '1'
+ 50: 252, # '2'
+ 51: 252, # '3'
+ 52: 252, # '4'
+ 53: 252, # '5'
+ 54: 252, # '6'
+ 55: 252, # '7'
+ 56: 252, # '8'
+ 57: 252, # '9'
+ 58: 253, # ':'
+ 59: 253, # ';'
+ 60: 253, # '<'
+ 61: 253, # '='
+ 62: 253, # '>'
+ 63: 253, # '?'
+ 64: 253, # '@'
+ 65: 28, # 'A'
+ 66: 40, # 'B'
+ 67: 54, # 'C'
+ 68: 45, # 'D'
+ 69: 32, # 'E'
+ 70: 50, # 'F'
+ 71: 49, # 'G'
+ 72: 38, # 'H'
+ 73: 39, # 'I'
+ 74: 53, # 'J'
+ 75: 36, # 'K'
+ 76: 41, # 'L'
+ 77: 34, # 'M'
+ 78: 35, # 'N'
+ 79: 47, # 'O'
+ 80: 46, # 'P'
+ 81: 71, # 'Q'
+ 82: 43, # 'R'
+ 83: 33, # 'S'
+ 84: 37, # 'T'
+ 85: 57, # 'U'
+ 86: 48, # 'V'
+ 87: 64, # 'W'
+ 88: 68, # 'X'
+ 89: 55, # 'Y'
+ 90: 52, # 'Z'
+ 91: 253, # '['
+ 92: 253, # '\\'
+ 93: 253, # ']'
+ 94: 253, # '^'
+ 95: 253, # '_'
+ 96: 253, # '`'
+ 97: 2, # 'a'
+ 98: 18, # 'b'
+ 99: 26, # 'c'
+ 100: 17, # 'd'
+ 101: 1, # 'e'
+ 102: 27, # 'f'
+ 103: 12, # 'g'
+ 104: 20, # 'h'
+ 105: 9, # 'i'
+ 106: 22, # 'j'
+ 107: 7, # 'k'
+ 108: 6, # 'l'
+ 109: 13, # 'm'
+ 110: 4, # 'n'
+ 111: 8, # 'o'
+ 112: 23, # 'p'
+ 113: 67, # 'q'
+ 114: 10, # 'r'
+ 115: 5, # 's'
+ 116: 3, # 't'
+ 117: 21, # 'u'
+ 118: 19, # 'v'
+ 119: 65, # 'w'
+ 120: 62, # 'x'
+ 121: 16, # 'y'
+ 122: 11, # 'z'
+ 123: 253, # '{'
+ 124: 253, # '|'
+ 125: 253, # '}'
+ 126: 253, # '~'
+ 127: 253, # '\x7f'
+ 128: 159, # '\x80'
+ 129: 160, # '\x81'
+ 130: 161, # '\x82'
+ 131: 162, # '\x83'
+ 132: 163, # '\x84'
+ 133: 164, # '\x85'
+ 134: 165, # '\x86'
+ 135: 166, # '\x87'
+ 136: 167, # '\x88'
+ 137: 168, # '\x89'
+ 138: 169, # '\x8a'
+ 139: 170, # '\x8b'
+ 140: 171, # '\x8c'
+ 141: 172, # '\x8d'
+ 142: 173, # '\x8e'
+ 143: 174, # '\x8f'
+ 144: 175, # '\x90'
+ 145: 176, # '\x91'
+ 146: 177, # '\x92'
+ 147: 178, # '\x93'
+ 148: 179, # '\x94'
+ 149: 180, # '\x95'
+ 150: 181, # '\x96'
+ 151: 182, # '\x97'
+ 152: 183, # '\x98'
+ 153: 184, # '\x99'
+ 154: 185, # '\x9a'
+ 155: 186, # '\x9b'
+ 156: 187, # '\x9c'
+ 157: 188, # '\x9d'
+ 158: 189, # '\x9e'
+ 159: 190, # '\x9f'
+ 160: 191, # '\xa0'
+ 161: 192, # 'Ą'
+ 162: 193, # '˘'
+ 163: 194, # 'Ł'
+ 164: 195, # '¤'
+ 165: 196, # 'Ľ'
+ 166: 197, # 'Ś'
+ 167: 75, # '§'
+ 168: 198, # '¨'
+ 169: 199, # 'Š'
+ 170: 200, # 'Ş'
+ 171: 201, # 'Ť'
+ 172: 202, # 'Ź'
+ 173: 203, # '\xad'
+ 174: 204, # 'Ž'
+ 175: 205, # 'Ż'
+ 176: 79, # '°'
+ 177: 206, # 'ą'
+ 178: 207, # '˛'
+ 179: 208, # 'ł'
+ 180: 209, # '´'
+ 181: 210, # 'ľ'
+ 182: 211, # 'ś'
+ 183: 212, # 'ˇ'
+ 184: 213, # '¸'
+ 185: 214, # 'š'
+ 186: 215, # 'ş'
+ 187: 216, # 'ť'
+ 188: 217, # 'ź'
+ 189: 218, # '˝'
+ 190: 219, # 'ž'
+ 191: 220, # 'ż'
+ 192: 221, # 'Ŕ'
+ 193: 51, # 'Á'
+ 194: 81, # 'Â'
+ 195: 222, # 'Ă'
+ 196: 78, # 'Ä'
+ 197: 223, # 'Ĺ'
+ 198: 224, # 'Ć'
+ 199: 225, # 'Ç'
+ 200: 226, # 'Č'
+ 201: 44, # 'É'
+ 202: 227, # 'Ę'
+ 203: 228, # 'Ë'
+ 204: 229, # 'Ě'
+ 205: 61, # 'Í'
+ 206: 230, # 'Î'
+ 207: 231, # 'Ď'
+ 208: 232, # 'Đ'
+ 209: 233, # 'Ń'
+ 210: 234, # 'Ň'
+ 211: 58, # 'Ó'
+ 212: 235, # 'Ô'
+ 213: 66, # 'Ő'
+ 214: 59, # 'Ö'
+ 215: 236, # '×'
+ 216: 237, # 'Ř'
+ 217: 238, # 'Ů'
+ 218: 60, # 'Ú'
+ 219: 69, # 'Ű'
+ 220: 63, # 'Ü'
+ 221: 239, # 'Ý'
+ 222: 240, # 'Ţ'
+ 223: 241, # 'ß'
+ 224: 82, # 'ŕ'
+ 225: 14, # 'á'
+ 226: 74, # 'â'
+ 227: 242, # 'ă'
+ 228: 70, # 'ä'
+ 229: 80, # 'ĺ'
+ 230: 243, # 'ć'
+ 231: 72, # 'ç'
+ 232: 244, # 'č'
+ 233: 15, # 'é'
+ 234: 83, # 'ę'
+ 235: 77, # 'ë'
+ 236: 84, # 'ě'
+ 237: 30, # 'í'
+ 238: 76, # 'î'
+ 239: 85, # 'ď'
+ 240: 245, # 'đ'
+ 241: 246, # 'ń'
+ 242: 247, # 'ň'
+ 243: 25, # 'ó'
+ 244: 73, # 'ô'
+ 245: 42, # 'ő'
+ 246: 24, # 'ö'
+ 247: 248, # '÷'
+ 248: 249, # 'ř'
+ 249: 250, # 'ů'
+ 250: 31, # 'ú'
+ 251: 56, # 'ű'
+ 252: 29, # 'ü'
+ 253: 251, # 'ý'
+ 254: 252, # 'ţ'
+ 255: 253, # '˙'
+}
+
+ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2',
+ language='Hungarian',
+ char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER,
+ language_model=HUNGARIAN_LANG_MODEL,
+ typical_positive_ratio=0.947368,
+ keep_ascii_letters=True,
+ alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÁÉÍÓÖÚÜáéíóöúüŐőŰű')
+
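The two Hungarian SingleByteCharSetModel instances above differ only in their char_to_order_map; both share HUNGARIAN_LANG_MODEL, whose cells rate each bigram of frequency orders from 0 (negative) to 3 (positive). A minimal sketch of how such a model can be scored against raw bytes follows. This is an illustration of the idea only, not the vendored prober's actual code; the 64-order cutoff and the plain ratio-of-positives rule are simplifying assumptions.

def positive_bigram_ratio(data: bytes, model) -> float:
    # Map each byte to its frequency order, rate consecutive order pairs
    # with the bigram table, and return the share of "positive" (3) pairs.
    frequent_cutoff = 64  # assumed: only the most frequent orders are modeled
    positive = total = 0
    prev = None
    for byte in data:
        order = model.char_to_order_map[byte]
        if order >= frequent_cutoff:  # symbols, digits, control bytes, rare letters
            prev = None  # break the bigram chain at non-letter material
            continue
        if prev is not None:
            total += 1
            if model.language_model.get(prev, {}).get(order, 0) == 3:
                positive += 1
        prev = order
    return positive / total if total else 0.0

# Comparing both candidates on the same bytes: the charset whose ratio lands
# nearer typical_positive_ratio (0.947368 here) is the likelier encoding.
# iso_ratio = positive_bigram_ratio(raw, ISO_8859_2_HUNGARIAN_MODEL)
# win_ratio = positive_bigram_ratio(raw, WINDOWS_1250_HUNGARIAN_MODEL)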
212, # 'ˇ' + 184: 213, # '¸' + 185: 214, # 'Å¡' + 186: 215, # 'ÅŸ' + 187: 216, # 'Å¥' + 188: 217, # 'ź' + 189: 218, # 'Ë' + 190: 219, # 'ž' + 191: 220, # 'ż' + 192: 221, # 'Å”' + 193: 51, # 'Ã' + 194: 81, # 'Â' + 195: 222, # 'Ä‚' + 196: 78, # 'Ä' + 197: 223, # 'Ĺ' + 198: 224, # 'Ć' + 199: 225, # 'Ç' + 200: 226, # 'ÄŒ' + 201: 44, # 'É' + 202: 227, # 'Ę' + 203: 228, # 'Ë' + 204: 229, # 'Äš' + 205: 61, # 'Ã' + 206: 230, # 'ÃŽ' + 207: 231, # 'ÄŽ' + 208: 232, # 'Ä' + 209: 233, # 'Ń' + 210: 234, # 'Ň' + 211: 58, # 'Ó' + 212: 235, # 'Ô' + 213: 66, # 'Å' + 214: 59, # 'Ö' + 215: 236, # '×' + 216: 237, # 'Ř' + 217: 238, # 'Å®' + 218: 60, # 'Ú' + 219: 69, # 'Ű' + 220: 63, # 'Ü' + 221: 239, # 'Ã' + 222: 240, # 'Å¢' + 223: 241, # 'ß' + 224: 82, # 'Å•' + 225: 14, # 'á' + 226: 74, # 'â' + 227: 242, # 'ă' + 228: 70, # 'ä' + 229: 80, # 'ĺ' + 230: 243, # 'ć' + 231: 72, # 'ç' + 232: 244, # 'Ä' + 233: 15, # 'é' + 234: 83, # 'Ä™' + 235: 77, # 'ë' + 236: 84, # 'Ä›' + 237: 30, # 'í' + 238: 76, # 'î' + 239: 85, # 'Ä' + 240: 245, # 'Ä‘' + 241: 246, # 'Å„' + 242: 247, # 'ň' + 243: 25, # 'ó' + 244: 73, # 'ô' + 245: 42, # 'Å‘' + 246: 24, # 'ö' + 247: 248, # '÷' + 248: 249, # 'Å™' + 249: 250, # 'ů' + 250: 31, # 'ú' + 251: 56, # 'ű' + 252: 29, # 'ü' + 253: 251, # 'ý' + 254: 252, # 'Å£' + 255: 253, # 'Ë™' +} + +ISO_8859_2_HUNGARIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-2', + language='Hungarian', + char_to_order_map=ISO_8859_2_HUNGARIAN_CHAR_TO_ORDER, + language_model=HUNGARIAN_LANG_MODEL, + typical_positive_ratio=0.947368, + keep_ascii_letters=True, + alphabet='ABCDEFGHIJKLMNOPRSTUVZabcdefghijklmnoprstuvzÃÉÃÓÖÚÜáéíóöúüÅőŰű') + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langrussianmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langrussianmodel.py new file mode 100644 index 000000000..5594452b5 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langrussianmodel.py @@ -0,0 +1,5718 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +RUSSIAN_LANG_MODEL = { + 37: { # 'Ð' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 44: { # 'Б' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 
1,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 1,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 2,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    33: {  # 'В'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 1,  # 'б'
+        10: 1,  # 'в'
+        19: 1,  # 'г'
+        13: 2,  # 'д'
+        2: 3,  # 'е'
+        24: 0,  # 'ж'
+        20: 2,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 3,  # 'о'
+        15: 2,  # 'п'
+        9: 2,  # 'р'
+        7: 3,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 1,  # 'ц'
+        22: 2,  # 'ч'
+        25: 1,  # 'ш'
+        29: 0,  # 'щ'
+        54: 1,  # 'ъ'
+        18: 3,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 0,  # 'ю'
+        16: 1,  # 'я'
+    },
+    46: {  # 'Г'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 1,  # 'ь'
+        30: 1,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    41: {  # 'Д'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 2,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 3,  # 'ж'
+        20: 1,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 1,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 1,  # 'я'
+    },
+    48: {  # 'Е'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 2,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 2,  # 'Р'
+        32: 2,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 1,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 1,  # 'з'
+        4: 0,  # 'и'
+        23: 2,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 1,  # 'н'
+        1: 0,  # 'о'
+        15: 1,  # 'п'
+        9: 1,  # 'р'
+        7: 3,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 1,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 2,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    56: {  # 'Ж'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 0,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 2,  # 'а'
+        21: 1,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 1,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 1,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 2,  # 'ю'
+        16: 0,  # 'я'
+    },
+    51: {  # 'З'
+        37: 1,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 0,  # 'П'
+        45: 1,  # 'Р'
+        32: 0,  # 'С'
+        40: 0,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 1,  # 'б'
+        10: 2,  # 'в'
+        19: 0,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 1,  # 'м'
+        5: 2,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 1,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 1,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 1,  # 'я'
+    },
+    42: {  # 'И'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 1,  # 'Г'
+        41: 1,  # 'Д'
+        48: 2,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 1,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 2,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 1,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 1,  # 'а'
+        21: 2,  # 'б'
+        10: 2,  # 'в'
+        19: 2,  # 'г'
+        13: 2,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 2,  # 'з'
+        4: 1,  # 'и'
+        23: 0,  # 'й'
+        11: 1,  # 'к'
+        8: 2,  # 'л'
+        12: 2,  # 'м'
+        5: 2,  # 'н'
+        1: 1,  # 'о'
+        15: 1,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 1,  # 'у'
+        39: 1,  # 'ф'
+        26: 2,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 1,  # 'ш'
+        29: 1,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    60: {  # 'Й'
+        37: 0,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 0,  # 'Г'
+        41: 1,  # 'Д'
+        48: 0,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 0,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 0,  # 'О'
+        35: 0,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 0,  # 'У'
+        53: 0,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 0,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 1,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 0,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 0,  # 'л'
+        12: 0,  # 'м'
+        5: 0,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 0,  # 'у'
+        39: 0,  # 'ф'
+        26: 0,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 0,  # 'ы'
+        17: 0,  # 'ь'
+        30: 0,  # 'э'
+        27: 0,  # 'ю'
+        16: 0,  # 'я'
+    },
+    36: {  # 'К'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 1,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 0,  # 'К'
+        49: 1,  # 'Л'
+        38: 0,  # 'М'
+        31: 1,  # 'Н'
+        34: 2,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 1,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 0,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 0,  # 'Ю'
+        43: 0,  # 'Я'
+        3: 3,  # 'а'
+        21: 0,  # 'б'
+        10: 1,  # 'в'
+        19: 0,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 0,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 2,  # 'л'
+        12: 0,  # 'м'
+        5: 1,  # 'н'
+        1: 3,  # 'о'
+        15: 0,  # 'п'
+        9: 2,  # 'р'
+        7: 2,  # 'с'
+        6: 2,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 1,  # 'ю'
+        16: 0,  # 'я'
+    },
+    49: {  # 'Л'
+        37: 2,  # 'А'
+        44: 0,  # 'Б'
+        33: 0,  # 'В'
+        46: 1,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 1,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 0,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 0,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 0,  # 'Ф'
+        55: 0,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 1,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 1,  # 'Ь'
+        47: 0,  # 'Э'
+        59: 1,  # 'Ю'
+        43: 1,  # 'Я'
+        3: 2,  # 'а'
+        21: 0,  # 'б'
+        10: 0,  # 'в'
+        19: 1,  # 'г'
+        13: 0,  # 'д'
+        2: 2,  # 'е'
+        24: 1,  # 'ж'
+        20: 0,  # 'з'
+        4: 2,  # 'и'
+        23: 0,  # 'й'
+        11: 0,  # 'к'
+        8: 1,  # 'л'
+        12: 0,  # 'м'
+        5: 1,  # 'н'
+        1: 2,  # 'о'
+        15: 0,  # 'п'
+        9: 0,  # 'р'
+        7: 0,  # 'с'
+        6: 0,  # 'т'
+        14: 2,  # 'у'
+        39: 0,  # 'ф'
+        26: 1,  # 'х'
+        28: 0,  # 'ц'
+        22: 0,  # 'ч'
+        25: 0,  # 'ш'
+        29: 0,  # 'щ'
+        54: 0,  # 'ъ'
+        18: 1,  # 'ы'
+        17: 1,  # 'ь'
+        30: 2,  # 'э'
+        27: 2,  # 'ю'
+        16: 1,  # 'я'
+    },
+    38: {  # 'М'
+        37: 1,  # 'А'
+        44: 1,  # 'Б'
+        33: 1,  # 'В'
+        46: 0,  # 'Г'
+        41: 0,  # 'Д'
+        48: 1,  # 'Е'
+        56: 0,  # 'Ж'
+        51: 0,  # 'З'
+        42: 1,  # 'И'
+        60: 0,  # 'Й'
+        36: 1,  # 'К'
+        49: 1,  # 'Л'
+        38: 1,  # 'М'
+        31: 1,  # 'Н'
+        34: 1,  # 'О'
+        35: 1,  # 'П'
+        45: 1,  # 'Р'
+        32: 1,  # 'С'
+        40: 1,  # 'Т'
+        52: 1,  # 'У'
+        53: 1,  # 'Ф'
+        55: 1,  # 'Х'
+        58: 0,  # 'Ц'
+        50: 0,  # 'Ч'
+        57: 0,  # 'Ш'
+        63: 0,  # 'Щ'
+        62: 1,  # 'Ы'
+        61: 0,  # 'Ь'
+        47: 1,  # 'Э'
+        59: 0, 
# 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 31: { # 'Ð' + 37: 2, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 2, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 34: { # 'О' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 2, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 1, # 'З' + 42: 1, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 2, # 'Л' + 38: 1, # 'М' + 31: 2, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 1, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 35: { # 'П' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 2, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 45: { # 'Р' + 37: 2, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 2, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 2, # 'И' + 
60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 2, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 32: { # 'С' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 2, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 2, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 40: { # 'Т' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 2, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 1, # 'Ь' + 47: 1, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 52: { # 'У' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 1, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 1, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 0, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 
1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 53: { # 'Ф' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 55: { # 'Ð¥' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 2, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 0, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 58: { # 'Ц' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 1, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 50: { # 'Ч' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 
'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 1, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 57: { # 'Ш' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 1, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 63: { # 'Щ' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 1, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 62: { # 'Ы' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 1, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 61: { # 'Ь' + 37: 0, # 'Ð' + 44: 1, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 
1, # 'М' + 31: 1, # 'Ð' + 34: 1, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 1, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 47: { # 'Э' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 1, # 'Й' + 36: 1, # 'К' + 49: 1, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 1, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 59: { # 'Ю' + 37: 1, # 'Ð' + 44: 1, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 1, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 0, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 43: { # 'Я' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 1, # 'Ð’' + 46: 1, # 'Г' + 41: 0, # 'Д' + 48: 1, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 1, # 'С' + 40: 1, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 1, # 'Ð¥' + 58: 0, # 'Ц' + 50: 1, # 'Ч' + 57: 0, # 'Ш' + 63: 1, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 1, # 'Ю' + 43: 1, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 0, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 1, # 'й' + 11: 1, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 1, # 'п' + 9: 1, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 1, 
# 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 3: { # 'а' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 1, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 21: { # 'б' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 1, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 10: { # 'в' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 19: { # 'г' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 3, # 'д' 
+ 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 13: { # 'д' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 3, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 2: { # 'е' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 24: { # 'ж' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 20: { # 'з' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 
'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 4: { # 'и' + 37: 1, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 23: { # 'й' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 1, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 11: { # 'к' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 1, # 'Ñ‹' + 17: 1, # 
'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 8: { # 'л' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 1, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 12: { # 'м' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 5: { # 'н' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 3, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 1, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 1: { # 'о' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 3, 
# 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 15: { # 'п' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 3, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 1, # 'ш' + 29: 1, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 9: { # 'Ñ€' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 7: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 1, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 2, # 'ш' + 29: 1, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 6: { # 'Ñ‚' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 
52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 2, # 'щ' + 54: 2, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 14: { # 'у' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 3, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 39: { # 'Ñ„' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 0, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 2, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 2, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 2, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 26: { # 'Ñ…' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 3, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 1, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 1, # 'п' + 9: 3, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 2, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 1, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' 
+ }, + 28: { # 'ц' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 1, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 2, # 'к' + 8: 1, # 'л' + 12: 1, # 'м' + 5: 1, # 'н' + 1: 3, # 'о' + 15: 0, # 'п' + 9: 1, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 1, # 'Ñ‚' + 14: 3, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 1, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 3, # 'Ñ‹' + 17: 1, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 22: { # 'ч' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 2, # 'л' + 12: 1, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 3, # 'у' + 39: 1, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 1, # 'ч' + 25: 2, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 25: { # 'ш' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 1, # 'б' + 10: 2, # 'в' + 19: 1, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 2, # 'м' + 5: 3, # 'н' + 1: 3, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 1, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 3, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 1, # 'ц' + 22: 1, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 3, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 29: { # 'щ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 3, # 'а' + 21: 0, # 'б' + 10: 1, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 3, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 3, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 
'л' + 12: 1, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 0, # 'п' + 9: 2, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 2, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 2, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 0, # 'Ñ' + }, + 54: { # 'ÑŠ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 0, # 'б' + 10: 0, # 'в' + 19: 0, # 'г' + 13: 0, # 'д' + 2: 2, # 'е' + 24: 0, # 'ж' + 20: 0, # 'з' + 4: 0, # 'и' + 23: 0, # 'й' + 11: 0, # 'к' + 8: 0, # 'л' + 12: 0, # 'м' + 5: 0, # 'н' + 1: 0, # 'о' + 15: 0, # 'п' + 9: 0, # 'Ñ€' + 7: 0, # 'Ñ' + 6: 0, # 'Ñ‚' + 14: 0, # 'у' + 39: 0, # 'Ñ„' + 26: 0, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 0, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 18: { # 'Ñ‹' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 3, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 2, # 'и' + 23: 3, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 1, # 'о' + 15: 3, # 'п' + 9: 3, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 0, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 3, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 0, # 'ÑŽ' + 16: 2, # 'Ñ' + }, + 17: { # 'ÑŒ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 2, # 'в' + 19: 2, # 'г' + 13: 2, # 'д' + 2: 3, # 'е' + 24: 1, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 0, # 'й' + 11: 3, # 'к' + 8: 0, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 2, # 'о' + 15: 2, # 'п' + 9: 1, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 2, # 'Ñ‚' + 14: 0, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 3, # 'ш' + 29: 2, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 3, # 'ÑŽ' + 16: 3, # 'Ñ' + }, + 30: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 1, # 'М' + 31: 1, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 1, # 'Р' + 32: 1, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 1, # 'Ф' + 55: 0, # 
'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 1, # 'б' + 10: 1, # 'в' + 19: 1, # 'г' + 13: 2, # 'д' + 2: 1, # 'е' + 24: 0, # 'ж' + 20: 1, # 'з' + 4: 0, # 'и' + 23: 2, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 2, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 2, # 'Ñ„' + 26: 1, # 'Ñ…' + 28: 0, # 'ц' + 22: 0, # 'ч' + 25: 1, # 'ш' + 29: 0, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 1, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 27: { # 'ÑŽ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 2, # 'а' + 21: 3, # 'б' + 10: 1, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 1, # 'е' + 24: 2, # 'ж' + 20: 2, # 'з' + 4: 1, # 'и' + 23: 1, # 'й' + 11: 2, # 'к' + 8: 2, # 'л' + 12: 2, # 'м' + 5: 2, # 'н' + 1: 1, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 0, # 'у' + 39: 1, # 'Ñ„' + 26: 2, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 1, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 1, # 'Ñ' + }, + 16: { # 'Ñ' + 37: 0, # 'Ð' + 44: 0, # 'Б' + 33: 0, # 'Ð’' + 46: 0, # 'Г' + 41: 0, # 'Д' + 48: 0, # 'Е' + 56: 0, # 'Ж' + 51: 0, # 'З' + 42: 0, # 'И' + 60: 0, # 'Й' + 36: 0, # 'К' + 49: 0, # 'Л' + 38: 0, # 'М' + 31: 0, # 'Ð' + 34: 0, # 'О' + 35: 0, # 'П' + 45: 0, # 'Р' + 32: 0, # 'С' + 40: 0, # 'Т' + 52: 0, # 'У' + 53: 0, # 'Ф' + 55: 0, # 'Ð¥' + 58: 0, # 'Ц' + 50: 0, # 'Ч' + 57: 0, # 'Ш' + 63: 0, # 'Щ' + 62: 0, # 'Ы' + 61: 0, # 'Ь' + 47: 0, # 'Э' + 59: 0, # 'Ю' + 43: 0, # 'Я' + 3: 0, # 'а' + 21: 2, # 'б' + 10: 3, # 'в' + 19: 2, # 'г' + 13: 3, # 'д' + 2: 3, # 'е' + 24: 3, # 'ж' + 20: 3, # 'з' + 4: 2, # 'и' + 23: 2, # 'й' + 11: 3, # 'к' + 8: 3, # 'л' + 12: 3, # 'м' + 5: 3, # 'н' + 1: 0, # 'о' + 15: 2, # 'п' + 9: 2, # 'Ñ€' + 7: 3, # 'Ñ' + 6: 3, # 'Ñ‚' + 14: 1, # 'у' + 39: 1, # 'Ñ„' + 26: 3, # 'Ñ…' + 28: 2, # 'ц' + 22: 2, # 'ч' + 25: 2, # 'ш' + 29: 3, # 'щ' + 54: 0, # 'ÑŠ' + 18: 0, # 'Ñ‹' + 17: 0, # 'ÑŒ' + 30: 0, # 'Ñ' + 27: 2, # 'ÑŽ' + 16: 2, # 'Ñ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +IBM866_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' 
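+ # Editor's note (an assumption about the prober's behaviour, not
+ # upstream commentary): orders of 64 and above -- including the
+ # 251-255 categories from the legend above -- are treated as
+ # non-letters, so only the ~64 most frequent Cyrillic letters take
+ # part in the bigram scoring.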
+ 34: 253, # '"'
+ 35: 253, # '#'
+ 36: 253, # '$'
+ 37: 253, # '%'
+ 38: 253, # '&'
+ 39: 253, # "'"
+ 40: 253, # '('
+ 41: 253, # ')'
+ 42: 253, # '*'
+ 43: 253, # '+'
+ 44: 253, # ','
+ 45: 253, # '-'
+ 46: 253, # '.'
+ 47: 253, # '/'
+ 48: 252, # '0'
+ 49: 252, # '1'
+ 50: 252, # '2'
+ 51: 252, # '3'
+ 52: 252, # '4'
+ 53: 252, # '5'
+ 54: 252, # '6'
+ 55: 252, # '7'
+ 56: 252, # '8'
+ 57: 252, # '9'
+ 58: 253, # ':'
+ 59: 253, # ';'
+ 60: 253, # '<'
+ 61: 253, # '='
+ 62: 253, # '>'
+ 63: 253, # '?'
+ 64: 253, # '@'
+ 65: 142, # 'A'
+ 66: 143, # 'B'
+ 67: 144, # 'C'
+ 68: 145, # 'D'
+ 69: 146, # 'E'
+ 70: 147, # 'F'
+ 71: 148, # 'G'
+ 72: 149, # 'H'
+ 73: 150, # 'I'
+ 74: 151, # 'J'
+ 75: 152, # 'K'
+ 76: 74, # 'L'
+ 77: 153, # 'M'
+ 78: 75, # 'N'
+ 79: 154, # 'O'
+ 80: 155, # 'P'
+ 81: 156, # 'Q'
+ 82: 157, # 'R'
+ 83: 158, # 'S'
+ 84: 159, # 'T'
+ 85: 160, # 'U'
+ 86: 161, # 'V'
+ 87: 162, # 'W'
+ 88: 163, # 'X'
+ 89: 164, # 'Y'
+ 90: 165, # 'Z'
+ 91: 253, # '['
+ 92: 253, # '\\'
+ 93: 253, # ']'
+ 94: 253, # '^'
+ 95: 253, # '_'
+ 96: 253, # '`'
+ 97: 71, # 'a'
+ 98: 172, # 'b'
+ 99: 66, # 'c'
+ 100: 173, # 'd'
+ 101: 65, # 'e'
+ 102: 174, # 'f'
+ 103: 76, # 'g'
+ 104: 175, # 'h'
+ 105: 64, # 'i'
+ 106: 176, # 'j'
+ 107: 177, # 'k'
+ 108: 77, # 'l'
+ 109: 72, # 'm'
+ 110: 178, # 'n'
+ 111: 69, # 'o'
+ 112: 67, # 'p'
+ 113: 179, # 'q'
+ 114: 78, # 'r'
+ 115: 73, # 's'
+ 116: 180, # 't'
+ 117: 181, # 'u'
+ 118: 79, # 'v'
+ 119: 182, # 'w'
+ 120: 183, # 'x'
+ 121: 184, # 'y'
+ 122: 185, # 'z'
+ 123: 253, # '{'
+ 124: 253, # '|'
+ 125: 253, # '}'
+ 126: 253, # '~'
+ 127: 253, # '\x7f'
+ 128: 37, # 'А'
+ 129: 44, # 'Б'
+ 130: 33, # 'В'
+ 131: 46, # 'Г'
+ 132: 41, # 'Д'
+ 133: 48, # 'Е'
+ 134: 56, # 'Ж'
+ 135: 51, # 'З'
+ 136: 42, # 'И'
+ 137: 60, # 'Й'
+ 138: 36, # 'К'
+ 139: 49, # 'Л'
+ 140: 38, # 'М'
+ 141: 31, # 'Н'
+ 142: 34, # 'О'
+ 143: 35, # 'П'
+ 144: 45, # 'Р'
+ 145: 32, # 'С'
+ 146: 40, # 'Т'
+ 147: 52, # 'У'
+ 148: 53, # 'Ф'
+ 149: 55, # 'Х'
+ 150: 58, # 'Ц'
+ 151: 50, # 'Ч'
+ 152: 57, # 'Ш'
+ 153: 63, # 'Щ'
+ 154: 70, # 'Ъ'
+ 155: 62, # 'Ы'
+ 156: 61, # 'Ь'
+ 157: 47, # 'Э'
+ 158: 59, # 'Ю'
+ 159: 43, # 'Я'
+ 160: 3, # 'а'
+ 161: 21, # 'б'
+ 162: 10, # 'в'
+ 163: 19, # 'г'
+ 164: 13, # 'д'
+ 165: 2, # 'е'
+ 166: 24, # 'ж'
+ 167: 20, # 'з'
+ 168: 4, # 'и'
+ 169: 23, # 'й'
+ 170: 11, # 'к'
+ 171: 8, # 'л'
+ 172: 12, # 'м'
+ 173: 5, # 'н'
+ 174: 1, # 'о'
+ 175: 15, # 'п'
+ 176: 191, # '░'
+ 177: 192, # '▒'
+ 178: 193, # '▓'
+ 179: 194, # '│'
+ 180: 195, # '┤'
+ 181: 196, # '╡'
+ 182: 197, # '╢'
+ 183: 198, # '╖'
+ 184: 199, # '╕'
+ 185: 200, # '╣'
+ 186: 201, # '║'
+ 187: 202, # '╗'
+ 188: 203, # '╝'
+ 189: 204, # '╜'
+ 190: 205, # '╛'
+ 191: 206, # '┐'
+ 192: 207, # '└'
+ 193: 208, # '┴'
+ 194: 209, # '┬'
+ 195: 210, # '├'
+ 196: 211, # '─'
+ 197: 212, # '┼'
+ 198: 213, # '╞'
+ 199: 214, # '╟'
+ 200: 215, # '╚'
+ 201: 216, # '╔'
+ 202: 217, # '╩'
+ 203: 218, # '╦'
+ 204: 219, # '╠'
+ 205: 220, # '═'
+ 206: 221, # '╬'
+ 207: 222, # '╧'
+ 208: 223, # '╨'
+ 209: 224, # '╤'
+ 210: 225, # '╥'
+ 211: 226, # '╙'
+ 212: 227, # '╘'
+ 213: 228, # '╒'
+ 214: 229, # '╓'
+ 215: 230, # '╫'
+ 216: 231, # '╪'
+ 217: 232, # '┘'
+ 218: 233, # '┌'
+ 219: 234, # '█'
+ 220: 235, # '▄'
+ 221: 236, # '▌'
+ 222: 237, # '▐'
+ 223: 238, # '▀'
+ 224: 9, # 'р'
+ 225: 7, # 'с'
+ 226: 6, # 'т'
+ 227: 14, # 'у'
+ 228: 39, # 'ф'
+ 229: 26, # 'х'
+ 230: 28, # 'ц'
+ 231: 22, # 'ч'
+ 232: 25, # 'ш'
+ 233: 29, # 'щ'
+ 234: 54, # 'ъ'
+ 235: 18, # 'ы'
+ 236: 17, # 'ь'
+ 237: 30, # 'э'
+ 238: 27, # 'ю'
+ 239: 16, # 'я'
+ 240: 239, # 'Ё'
+ 241: 68, # 'ё'
+ 242: 240, # 'Є'
+ 243: 241, # 'є'
+ 244: 242, # 'Ї'
+ 245: 243, # 'ї'
+ 246: 244, # 'Ў'
+ 247: 245, # 'ў'
+ 248: 246, # '°'
+ 249: 247, # '∙'
+ 250: 248, # '·'
+ 251: 249, # '√'
+ 252: 250, # '№'
+ 253: 251, # '¤'
+ 254: 252, # '■'
+ 255: 255, # '\xa0'
+}
+
+IBM866_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM866',
+ language='Russian',
+ char_to_order_map=IBM866_RUSSIAN_CHAR_TO_ORDER,
+ language_model=RUSSIAN_LANG_MODEL,
+ typical_positive_ratio=0.976601,
+ keep_ascii_letters=False,
+ alphabet='ЁАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяё')
+
+WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER = {
+ 0: 255, # '\x00'
+ 1: 255, # '\x01'
+ 2: 255, # '\x02'
+ 3: 255, # '\x03'
+ 4: 255, # '\x04'
+ 5: 255, # '\x05'
+ 6: 255, # '\x06'
+ 7: 255, # '\x07'
+ 8: 255, # '\x08'
+ 9: 255, # '\t'
+ 10: 254, # '\n'
+ 11: 255, # '\x0b'
+ 12: 255, # '\x0c'
+ 13: 254, # '\r'
+ 14: 255, # '\x0e'
+ 15: 255, # '\x0f'
+ 16: 255, # '\x10'
+ 17: 255, # '\x11'
+ 18: 255, # '\x12'
+ 19: 255, # '\x13'
+ 20: 255, # '\x14'
+ 21: 255, # '\x15'
+ 22: 255, # '\x16'
+ 23: 255, # '\x17'
+ 24: 255, # '\x18'
+ 25: 255, # '\x19'
+ 26: 255, # '\x1a'
+ 27: 255, # '\x1b'
+ 28: 255, # '\x1c'
+ 29: 255, # '\x1d'
+ 30: 255, # '\x1e'
+ 31: 255, # '\x1f'
+ 32: 253, # ' '
+ 33: 253, # '!'
+ 34: 253, # '"'
+ 35: 253, # '#'
+ 36: 253, # '$'
+ 37: 253, # '%'
+ 38: 253, # '&'
+ 39: 253, # "'"
+ 40: 253, # '('
+ 41: 253, # ')'
+ 42: 253, # '*'
+ 43: 253, # '+'
+ 44: 253, # ','
+ 45: 253, # '-'
+ 46: 253, # '.'
+ 47: 253, # '/'
+ 48: 252, # '0'
+ 49: 252, # '1'
+ 50: 252, # '2'
+ 51: 252, # '3'
+ 52: 252, # '4'
+ 53: 252, # '5'
+ 54: 252, # '6'
+ 55: 252, # '7'
+ 56: 252, # '8'
+ 57: 252, # '9'
+ 58: 253, # ':'
+ 59: 253, # ';'
+ 60: 253, # '<'
+ 61: 253, # '='
+ 62: 253, # '>'
+ 63: 253, # '?'
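+ # Editor's sketch (a hedged assumption about how the prober consumes
+ # these tables; the names refer to objects defined in this file):
+ #   last = None
+ #   for byte in buf:
+ #       order = WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER[byte]
+ #       if order < 64: # one of the ~64 frequent letters
+ #           if last is not None:
+ #               grade = RUSSIAN_LANG_MODEL[last].get(order, 0) # 0..3
+ #           last = order
+ #       else:
+ #           last = None # punctuation, digits and rare bytes reset the chain
+ # Note also that bytes 0x00-0x7F are mapped identically in every table
+ # in this file; only the 0x80-0xFF half distinguishes the encodings.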
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ђ' + 129: 192, # 'Ѓ' + 130: 193, # '‚' + 131: 194, # 'Ñ“' + 132: 195, # '„' + 133: 196, # '…' + 134: 197, # '†' + 135: 198, # '‡' + 136: 199, # '€' + 137: 200, # '‰' + 138: 201, # 'Љ' + 139: 202, # '‹' + 140: 203, # 'Њ' + 141: 204, # 'ÐŒ' + 142: 205, # 'Ћ' + 143: 206, # 'Ð' + 144: 207, # 'Ñ’' + 145: 208, # '‘' + 146: 209, # '’' + 147: 210, # '“' + 148: 211, # 'â€' + 149: 212, # '•' + 150: 213, # '–' + 151: 214, # '—' + 152: 215, # None + 153: 216, # 'â„¢' + 154: 217, # 'Ñ™' + 155: 218, # '›' + 156: 219, # 'Ñš' + 157: 220, # 'Ñœ' + 158: 221, # 'Ñ›' + 159: 222, # 'ÑŸ' + 160: 223, # '\xa0' + 161: 224, # 'ÐŽ' + 162: 225, # 'Ñž' + 163: 226, # 'Ј' + 164: 227, # '¤' + 165: 228, # 'Ò' + 166: 229, # '¦' + 167: 230, # '§' + 168: 231, # 'Ð' + 169: 232, # '©' + 170: 233, # 'Є' + 171: 234, # '«' + 172: 235, # '¬' + 173: 236, # '\xad' + 174: 237, # '®' + 175: 238, # 'Ї' + 176: 239, # '°' + 177: 240, # '±' + 178: 241, # 'І' + 179: 242, # 'Ñ–' + 180: 243, # 'Ò‘' + 181: 244, # 'µ' + 182: 245, # '¶' + 183: 246, # '·' + 184: 68, # 'Ñ‘' + 185: 247, # 'â„–' + 186: 248, # 'Ñ”' + 187: 249, # '»' + 188: 250, # 'ј' + 189: 251, # 'Ð…' + 190: 252, # 'Ñ•' + 191: 253, # 'Ñ—' + 192: 37, # 'Ð' + 193: 44, # 'Б' + 194: 33, # 'Ð’' + 195: 46, # 'Г' + 196: 41, # 'Д' + 197: 48, # 'Е' + 198: 56, # 'Ж' + 199: 51, # 'З' + 200: 42, # 'И' + 201: 60, # 'Й' + 202: 36, # 'К' + 203: 49, # 'Л' + 204: 38, # 'М' + 205: 31, # 'Ð' + 206: 34, # 'О' + 207: 35, # 'П' + 208: 45, # 'Р' + 209: 32, # 'С' + 210: 40, # 'Т' + 211: 52, # 'У' + 212: 53, # 'Ф' + 213: 55, # 'Ð¥' + 214: 58, # 'Ц' + 215: 50, # 'Ч' + 216: 57, # 'Ш' + 217: 63, # 'Щ' + 218: 70, # 'Ъ' + 219: 62, # 'Ы' + 220: 61, # 'Ь' + 221: 47, # 'Э' + 222: 59, # 'Ю' + 223: 43, # 'Я' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'Ñ€' + 241: 7, # 'Ñ' + 242: 6, # 'Ñ‚' + 243: 14, # 'у' + 244: 39, # 'Ñ„' + 245: 26, # 'Ñ…' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ÑŠ' + 251: 18, # 'Ñ‹' + 252: 17, # 'ÑŒ' + 253: 30, # 'Ñ' + 254: 27, # 'ÑŽ' + 255: 16, # 'Ñ' +} + +WINDOWS_1251_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='windows-1251', + language='Russian', + char_to_order_map=WINDOWS_1251_RUSSIAN_CHAR_TO_ORDER, + 
language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +IBM855_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # 'Ñ’' + 129: 192, # 'Ђ' + 130: 193, # 'Ñ“' + 131: 194, # 'Ѓ' + 132: 68, # 'Ñ‘' + 133: 195, # 'Ð' + 134: 196, # 'Ñ”' + 135: 197, # 'Є' + 136: 198, # 'Ñ•' + 137: 199, # 'Ð…' + 138: 200, # 'Ñ–' + 139: 201, # 'І' + 140: 202, # 'Ñ—' + 141: 203, # 'Ї' + 142: 204, # 'ј' + 143: 205, # 'Ј' + 144: 206, # 'Ñ™' + 145: 207, # 'Љ' + 146: 208, # 'Ñš' + 147: 209, # 'Њ' + 148: 210, # 'Ñ›' + 149: 211, # 'Ћ' + 150: 212, # 'Ñœ' + 151: 213, # 'ÐŒ' + 152: 214, # 'Ñž' + 153: 215, # 'ÐŽ' + 154: 216, # 'ÑŸ' + 155: 217, # 'Ð' + 156: 27, # 'ÑŽ' + 157: 59, # 'Ю' + 158: 54, # 'ÑŠ' + 159: 70, # 'Ъ' + 160: 3, # 'а' + 161: 37, # 'Ð' + 162: 21, # 'б' + 163: 44, # 'Б' + 164: 28, # 'ц' + 165: 58, # 'Ц' + 166: 13, # 'д' + 167: 41, # 'Д' + 168: 2, # 'е' + 169: 48, # 'Е' + 170: 39, # 'Ñ„' + 171: 53, # 'Ф' + 172: 19, # 'г' + 173: 46, # 'Г' + 174: 218, # '«' + 175: 219, # '»' + 176: 220, # 'â–‘' + 177: 221, # 'â–’' + 178: 222, # 'â–“' + 179: 223, # '│' + 180: 224, # '┤' + 181: 26, # 'Ñ…' + 182: 55, # 'Ð¥' + 183: 4, # 'и' + 184: 42, # 'И' + 185: 225, # 'â•£' + 186: 226, # 
'â•‘' + 187: 227, # 'â•—' + 188: 228, # 'â•' + 189: 23, # 'й' + 190: 60, # 'Й' + 191: 229, # 'â”' + 192: 230, # 'â””' + 193: 231, # 'â”´' + 194: 232, # '┬' + 195: 233, # '├' + 196: 234, # '─' + 197: 235, # '┼' + 198: 11, # 'к' + 199: 36, # 'К' + 200: 236, # '╚' + 201: 237, # 'â•”' + 202: 238, # 'â•©' + 203: 239, # '╦' + 204: 240, # 'â• ' + 205: 241, # 'â•' + 206: 242, # '╬' + 207: 243, # '¤' + 208: 8, # 'л' + 209: 49, # 'Л' + 210: 12, # 'м' + 211: 38, # 'М' + 212: 5, # 'н' + 213: 31, # 'Ð' + 214: 1, # 'о' + 215: 34, # 'О' + 216: 15, # 'п' + 217: 244, # '┘' + 218: 245, # '┌' + 219: 246, # 'â–ˆ' + 220: 247, # 'â–„' + 221: 35, # 'П' + 222: 16, # 'Ñ' + 223: 248, # 'â–€' + 224: 43, # 'Я' + 225: 9, # 'Ñ€' + 226: 45, # 'Р' + 227: 7, # 'Ñ' + 228: 32, # 'С' + 229: 6, # 'Ñ‚' + 230: 40, # 'Т' + 231: 14, # 'у' + 232: 52, # 'У' + 233: 24, # 'ж' + 234: 56, # 'Ж' + 235: 10, # 'в' + 236: 33, # 'Ð’' + 237: 17, # 'ÑŒ' + 238: 61, # 'Ь' + 239: 249, # 'â„–' + 240: 250, # '\xad' + 241: 18, # 'Ñ‹' + 242: 62, # 'Ы' + 243: 20, # 'з' + 244: 51, # 'З' + 245: 25, # 'ш' + 246: 57, # 'Ш' + 247: 30, # 'Ñ' + 248: 47, # 'Э' + 249: 29, # 'щ' + 250: 63, # 'Щ' + 251: 22, # 'ч' + 252: 50, # 'Ч' + 253: 251, # '§' + 254: 252, # 'â– ' + 255: 255, # '\xa0' +} + +IBM855_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='IBM855', + language='Russian', + char_to_order_map=IBM855_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +KOI8_R_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
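+ # Editor's note: KOI8-R arranges Cyrillic letters in the 0xC0-0xFF block
+ # by Latin look-alike rather than alphabetically, which is why the order
+ # values in the upper half of this table jump around relative to IBM866.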
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '─' + 129: 192, # '│' + 130: 193, # '┌' + 131: 194, # 'â”' + 132: 195, # 'â””' + 133: 196, # '┘' + 134: 197, # '├' + 135: 198, # '┤' + 136: 199, # '┬' + 137: 200, # 'â”´' + 138: 201, # '┼' + 139: 202, # 'â–€' + 140: 203, # 'â–„' + 141: 204, # 'â–ˆ' + 142: 205, # 'â–Œ' + 143: 206, # 'â–' + 144: 207, # 'â–‘' + 145: 208, # 'â–’' + 146: 209, # 'â–“' + 147: 210, # '⌠' + 148: 211, # 'â– ' + 149: 212, # '∙' + 150: 213, # '√' + 151: 214, # '≈' + 152: 215, # '≤' + 153: 216, # '≥' + 154: 217, # '\xa0' + 155: 218, # '⌡' + 156: 219, # '°' + 157: 220, # '²' + 158: 221, # '·' + 159: 222, # '÷' + 160: 223, # 'â•' + 161: 224, # 'â•‘' + 162: 225, # 'â•’' + 163: 68, # 'Ñ‘' + 164: 226, # 'â•“' + 165: 227, # 'â•”' + 166: 228, # 'â••' + 167: 229, # 'â•–' + 168: 230, # 'â•—' + 169: 231, # '╘' + 170: 232, # 'â•™' + 171: 233, # '╚' + 172: 234, # 'â•›' + 173: 235, # '╜' + 174: 236, # 'â•' + 175: 237, # '╞' + 176: 238, # '╟' + 177: 239, # 'â• ' + 178: 240, # 'â•¡' + 179: 241, # 'Ð' + 180: 242, # 'â•¢' + 181: 243, # 'â•£' + 182: 244, # '╤' + 183: 245, # 'â•¥' + 184: 246, # '╦' + 185: 247, # 'â•§' + 186: 248, # '╨' + 187: 249, # 'â•©' + 188: 250, # '╪' + 189: 251, # 'â•«' + 190: 252, # '╬' + 191: 253, # '©' + 192: 27, # 'ÑŽ' + 193: 3, # 'а' + 194: 21, # 'б' + 195: 28, # 'ц' + 196: 13, # 'д' + 197: 2, # 'е' + 198: 39, # 'Ñ„' + 199: 19, # 'г' + 200: 26, # 'Ñ…' + 201: 4, # 'и' + 202: 23, # 'й' + 203: 11, # 'к' + 204: 8, # 'л' + 205: 12, # 'м' + 206: 5, # 'н' + 207: 1, # 'о' + 208: 15, # 'п' + 209: 16, # 'Ñ' + 210: 9, # 'Ñ€' + 211: 7, # 'Ñ' + 212: 6, # 'Ñ‚' + 213: 14, # 'у' + 214: 24, # 'ж' + 215: 10, # 'в' + 216: 17, # 'ÑŒ' + 217: 18, # 'Ñ‹' + 218: 20, # 'з' + 219: 25, # 'ш' + 220: 30, # 'Ñ' + 221: 29, # 'щ' + 222: 22, # 'ч' + 223: 54, # 'ÑŠ' + 224: 59, # 'Ю' + 225: 37, # 'Ð' + 226: 44, # 'Б' + 227: 58, # 'Ц' + 228: 41, # 'Д' + 229: 48, # 'Е' + 230: 53, # 'Ф' + 231: 46, # 'Г' + 232: 55, # 'Ð¥' + 233: 42, # 'И' + 234: 60, # 'Й' + 235: 36, # 'К' + 236: 49, # 'Л' + 237: 38, # 'М' + 238: 31, # 'Ð' + 239: 34, # 'О' + 240: 35, # 'П' + 241: 43, # 'Я' + 242: 45, # 'Р' + 243: 32, # 'С' + 244: 40, # 'Т' + 245: 52, # 'У' + 246: 56, # 'Ж' + 247: 33, # 'Ð’' + 248: 61, # 'Ь' + 249: 62, # 'Ы' + 250: 51, # 'З' + 251: 57, # 'Ш' + 252: 47, # 'Э' + 253: 63, # 'Щ' + 254: 50, # 'Ч' + 255: 70, # 'Ъ' +} + +KOI8_R_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='KOI8-R', + language='Russian', + char_to_order_map=KOI8_R_RUSSIAN_CHAR_TO_ORDER, 
+ language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' + 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 37, # 'Ð' + 129: 44, # 'Б' + 130: 33, # 'Ð’' + 131: 46, # 'Г' + 132: 41, # 'Д' + 133: 48, # 'Е' + 134: 56, # 'Ж' + 135: 51, # 'З' + 136: 42, # 'И' + 137: 60, # 'Й' + 138: 36, # 'К' + 139: 49, # 'Л' + 140: 38, # 'М' + 141: 31, # 'Ð' + 142: 34, # 'О' + 143: 35, # 'П' + 144: 45, # 'Р' + 145: 32, # 'С' + 146: 40, # 'Т' + 147: 52, # 'У' + 148: 53, # 'Ф' + 149: 55, # 'Ð¥' + 150: 58, # 'Ц' + 151: 50, # 'Ч' + 152: 57, # 'Ш' + 153: 63, # 'Щ' + 154: 70, # 'Ъ' + 155: 62, # 'Ы' + 156: 61, # 'Ь' + 157: 47, # 'Э' + 158: 59, # 'Ю' + 159: 43, # 'Я' + 160: 191, # '†' + 161: 192, # '°' + 162: 193, # 'Ò' + 163: 194, # '£' + 164: 195, # '§' + 165: 196, # '•' + 166: 197, # '¶' + 167: 198, # 'І' + 168: 199, # '®' + 169: 200, # '©' + 170: 201, # 'â„¢' + 171: 202, # 'Ђ' + 172: 203, # 'Ñ’' + 173: 204, # '≠' + 174: 205, # 'Ѓ' + 175: 206, # 'Ñ“' + 176: 207, # '∞' + 177: 208, # '±' + 178: 209, # '≤' + 179: 210, # '≥' + 180: 211, # 'Ñ–' + 181: 212, # 'µ' + 182: 213, # 'Ò‘' + 183: 214, # 'Ј' + 184: 215, # 'Є' + 185: 216, # 'Ñ”' + 186: 217, # 'Ї' + 187: 218, # 
'Ñ—' + 188: 219, # 'Љ' + 189: 220, # 'Ñ™' + 190: 221, # 'Њ' + 191: 222, # 'Ñš' + 192: 223, # 'ј' + 193: 224, # 'Ð…' + 194: 225, # '¬' + 195: 226, # '√' + 196: 227, # 'Æ’' + 197: 228, # '≈' + 198: 229, # '∆' + 199: 230, # '«' + 200: 231, # '»' + 201: 232, # '…' + 202: 233, # '\xa0' + 203: 234, # 'Ћ' + 204: 235, # 'Ñ›' + 205: 236, # 'ÐŒ' + 206: 237, # 'Ñœ' + 207: 238, # 'Ñ•' + 208: 239, # '–' + 209: 240, # '—' + 210: 241, # '“' + 211: 242, # 'â€' + 212: 243, # '‘' + 213: 244, # '’' + 214: 245, # '÷' + 215: 246, # '„' + 216: 247, # 'ÐŽ' + 217: 248, # 'Ñž' + 218: 249, # 'Ð' + 219: 250, # 'ÑŸ' + 220: 251, # 'â„–' + 221: 252, # 'Ð' + 222: 68, # 'Ñ‘' + 223: 16, # 'Ñ' + 224: 3, # 'а' + 225: 21, # 'б' + 226: 10, # 'в' + 227: 19, # 'г' + 228: 13, # 'д' + 229: 2, # 'е' + 230: 24, # 'ж' + 231: 20, # 'з' + 232: 4, # 'и' + 233: 23, # 'й' + 234: 11, # 'к' + 235: 8, # 'л' + 236: 12, # 'м' + 237: 5, # 'н' + 238: 1, # 'о' + 239: 15, # 'п' + 240: 9, # 'Ñ€' + 241: 7, # 'Ñ' + 242: 6, # 'Ñ‚' + 243: 14, # 'у' + 244: 39, # 'Ñ„' + 245: 26, # 'Ñ…' + 246: 28, # 'ц' + 247: 22, # 'ч' + 248: 25, # 'ш' + 249: 29, # 'щ' + 250: 54, # 'ÑŠ' + 251: 18, # 'Ñ‹' + 252: 17, # 'ÑŒ' + 253: 30, # 'Ñ' + 254: 27, # 'ÑŽ' + 255: 255, # '€' +} + +MACCYRILLIC_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='MacCyrillic', + language='Russian', + char_to_order_map=MACCYRILLIC_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + +ISO_8859_5_RUSSIAN_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 254, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 254, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 253, # ' ' + 33: 253, # '!' + 34: 253, # '"' + 35: 253, # '#' + 36: 253, # '$' + 37: 253, # '%' + 38: 253, # '&' + 39: 253, # "'" + 40: 253, # '(' + 41: 253, # ')' + 42: 253, # '*' + 43: 253, # '+' + 44: 253, # ',' + 45: 253, # '-' + 46: 253, # '.' + 47: 253, # '/' + 48: 252, # '0' + 49: 252, # '1' + 50: 252, # '2' + 51: 252, # '3' + 52: 252, # '4' + 53: 252, # '5' + 54: 252, # '6' + 55: 252, # '7' + 56: 252, # '8' + 57: 252, # '9' + 58: 253, # ':' + 59: 253, # ';' + 60: 253, # '<' + 61: 253, # '=' + 62: 253, # '>' + 63: 253, # '?' 
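+ # Editor's note: ISO-8859-5 leaves 0x80-0x9F as control codes, so those
+ # bytes receive the filler orders 191-222 below rather than letters.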
+ 64: 253, # '@' + 65: 142, # 'A' + 66: 143, # 'B' + 67: 144, # 'C' + 68: 145, # 'D' + 69: 146, # 'E' + 70: 147, # 'F' + 71: 148, # 'G' + 72: 149, # 'H' + 73: 150, # 'I' + 74: 151, # 'J' + 75: 152, # 'K' + 76: 74, # 'L' + 77: 153, # 'M' + 78: 75, # 'N' + 79: 154, # 'O' + 80: 155, # 'P' + 81: 156, # 'Q' + 82: 157, # 'R' + 83: 158, # 'S' + 84: 159, # 'T' + 85: 160, # 'U' + 86: 161, # 'V' + 87: 162, # 'W' + 88: 163, # 'X' + 89: 164, # 'Y' + 90: 165, # 'Z' + 91: 253, # '[' + 92: 253, # '\\' + 93: 253, # ']' + 94: 253, # '^' + 95: 253, # '_' + 96: 253, # '`' + 97: 71, # 'a' + 98: 172, # 'b' + 99: 66, # 'c' + 100: 173, # 'd' + 101: 65, # 'e' + 102: 174, # 'f' + 103: 76, # 'g' + 104: 175, # 'h' + 105: 64, # 'i' + 106: 176, # 'j' + 107: 177, # 'k' + 108: 77, # 'l' + 109: 72, # 'm' + 110: 178, # 'n' + 111: 69, # 'o' + 112: 67, # 'p' + 113: 179, # 'q' + 114: 78, # 'r' + 115: 73, # 's' + 116: 180, # 't' + 117: 181, # 'u' + 118: 79, # 'v' + 119: 182, # 'w' + 120: 183, # 'x' + 121: 184, # 'y' + 122: 185, # 'z' + 123: 253, # '{' + 124: 253, # '|' + 125: 253, # '}' + 126: 253, # '~' + 127: 253, # '\x7f' + 128: 191, # '\x80' + 129: 192, # '\x81' + 130: 193, # '\x82' + 131: 194, # '\x83' + 132: 195, # '\x84' + 133: 196, # '\x85' + 134: 197, # '\x86' + 135: 198, # '\x87' + 136: 199, # '\x88' + 137: 200, # '\x89' + 138: 201, # '\x8a' + 139: 202, # '\x8b' + 140: 203, # '\x8c' + 141: 204, # '\x8d' + 142: 205, # '\x8e' + 143: 206, # '\x8f' + 144: 207, # '\x90' + 145: 208, # '\x91' + 146: 209, # '\x92' + 147: 210, # '\x93' + 148: 211, # '\x94' + 149: 212, # '\x95' + 150: 213, # '\x96' + 151: 214, # '\x97' + 152: 215, # '\x98' + 153: 216, # '\x99' + 154: 217, # '\x9a' + 155: 218, # '\x9b' + 156: 219, # '\x9c' + 157: 220, # '\x9d' + 158: 221, # '\x9e' + 159: 222, # '\x9f' + 160: 223, # '\xa0' + 161: 224, # 'Ð' + 162: 225, # 'Ђ' + 163: 226, # 'Ѓ' + 164: 227, # 'Є' + 165: 228, # 'Ð…' + 166: 229, # 'І' + 167: 230, # 'Ї' + 168: 231, # 'Ј' + 169: 232, # 'Љ' + 170: 233, # 'Њ' + 171: 234, # 'Ћ' + 172: 235, # 'ÐŒ' + 173: 236, # '\xad' + 174: 237, # 'ÐŽ' + 175: 238, # 'Ð' + 176: 37, # 'Ð' + 177: 44, # 'Б' + 178: 33, # 'Ð’' + 179: 46, # 'Г' + 180: 41, # 'Д' + 181: 48, # 'Е' + 182: 56, # 'Ж' + 183: 51, # 'З' + 184: 42, # 'И' + 185: 60, # 'Й' + 186: 36, # 'К' + 187: 49, # 'Л' + 188: 38, # 'М' + 189: 31, # 'Ð' + 190: 34, # 'О' + 191: 35, # 'П' + 192: 45, # 'Р' + 193: 32, # 'С' + 194: 40, # 'Т' + 195: 52, # 'У' + 196: 53, # 'Ф' + 197: 55, # 'Ð¥' + 198: 58, # 'Ц' + 199: 50, # 'Ч' + 200: 57, # 'Ш' + 201: 63, # 'Щ' + 202: 70, # 'Ъ' + 203: 62, # 'Ы' + 204: 61, # 'Ь' + 205: 47, # 'Э' + 206: 59, # 'Ю' + 207: 43, # 'Я' + 208: 3, # 'а' + 209: 21, # 'б' + 210: 10, # 'в' + 211: 19, # 'г' + 212: 13, # 'д' + 213: 2, # 'е' + 214: 24, # 'ж' + 215: 20, # 'з' + 216: 4, # 'и' + 217: 23, # 'й' + 218: 11, # 'к' + 219: 8, # 'л' + 220: 12, # 'м' + 221: 5, # 'н' + 222: 1, # 'о' + 223: 15, # 'п' + 224: 9, # 'Ñ€' + 225: 7, # 'Ñ' + 226: 6, # 'Ñ‚' + 227: 14, # 'у' + 228: 39, # 'Ñ„' + 229: 26, # 'Ñ…' + 230: 28, # 'ц' + 231: 22, # 'ч' + 232: 25, # 'ш' + 233: 29, # 'щ' + 234: 54, # 'ÑŠ' + 235: 18, # 'Ñ‹' + 236: 17, # 'ÑŒ' + 237: 30, # 'Ñ' + 238: 27, # 'ÑŽ' + 239: 16, # 'Ñ' + 240: 239, # 'â„–' + 241: 68, # 'Ñ‘' + 242: 240, # 'Ñ’' + 243: 241, # 'Ñ“' + 244: 242, # 'Ñ”' + 245: 243, # 'Ñ•' + 246: 244, # 'Ñ–' + 247: 245, # 'Ñ—' + 248: 246, # 'ј' + 249: 247, # 'Ñ™' + 250: 248, # 'Ñš' + 251: 249, # 'Ñ›' + 252: 250, # 'Ñœ' + 253: 251, # '§' + 254: 252, # 'Ñž' + 255: 255, # 'ÑŸ' +} + +ISO_8859_5_RUSSIAN_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-5', + 
language='Russian', + char_to_order_map=ISO_8859_5_RUSSIAN_CHAR_TO_ORDER, + language_model=RUSSIAN_LANG_MODEL, + typical_positive_ratio=0.976601, + keep_ascii_letters=False, + alphabet='ÐÐБВГДЕЖЗИЙКЛМÐОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрÑтуфхцчшщъыьÑÑŽÑÑ‘') + diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langthaimodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 000000000..9a37db573 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langthaimodel.py @@ -0,0 +1,4383 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel + + +# 3: Positive +# 2: Likely +# 1: Unlikely +# 0: Negative + +THAI_LANG_MODEL = { + 5: { # 'à¸' + 5: 2, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 3, # 'ฎ' + 57: 2, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 3, # 'ต' + 44: 0, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 1, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 2, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 2, # 'ศ' + 46: 3, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 1, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 2, # 'ไ' + 50: 1, # 'ๆ' + 37: 3, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 30: { # 'ข' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 0, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 2, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 3, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 24: { # 'ค' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 2, # 'ค' + 8: 2, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 2, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 3, # 'ล' + 12: 3, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 3, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 
'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 8: { # 'ง' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 2, # 'ง' + 26: 2, # 'จ' + 52: 1, # 'ฉ' + 34: 2, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 1, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 1, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 2, # 'ศ' + 46: 1, # 'ษ' + 18: 3, # 'ส' + 21: 3, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 1, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 3, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 26: { # 'จ' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 2, # 'ง' + 26: 3, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 1, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 1, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 2, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 52: { # 'ฉ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 3, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 3, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 1, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 34: { # 'ช' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 1, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 1, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 
0, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 1, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 51: { # 'ซ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 1, # 'ั' + 1: 1, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 1, # 'ุ' + 35: 1, # 'ู' + 11: 1, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 1, # '่' + 7: 2, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 47: { # 'à¸' + 5: 1, # 'à¸' + 30: 1, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 3, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 1, # 'ะ' + 10: 2, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 1, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 58: { # 'ฎ' + 5: 2, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 57: { # 'à¸' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 
'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 1, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 49: { # 'à¸' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 2, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 0, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 53: { # 'ฑ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 55: { # 'ฒ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 0, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 0, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 0, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # 
'๒' + 60: 0, # '๕' + }, + 43: { # 'ณ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 0, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 3, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 0, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 3, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 1, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 3, # 'ะ' + 10: 0, # 'ั' + 1: 3, # 'า' + 36: 0, # 'ำ' + 23: 1, # 'ิ' + 13: 2, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 0, # 'ุ' + 35: 0, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 20: { # 'ด' + 5: 2, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 3, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 2, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 3, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 2, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 3, # 'ั' + 1: 2, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 1, # 'ึ' + 27: 2, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 2, # 'เ' + 28: 2, # 'à¹' + 41: 1, # 'โ' + 29: 2, # 'ใ' + 33: 2, # 'ไ' + 50: 2, # 'ๆ' + 37: 2, # '็' + 6: 1, # '่' + 7: 3, # '้' + 38: 1, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 19: { # 'ต' + 5: 2, # 'à¸' + 30: 1, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 1, # 'ต' + 44: 2, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 2, # 'น' + 17: 1, # 'บ' + 25: 1, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 2, # 'ภ' + 9: 1, # 'ม' + 16: 1, # 'ย' + 2: 3, # 'ร' + 61: 0, # 'ฤ' + 15: 2, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 3, # 'ส' + 21: 0, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 2, # 'ำ' + 23: 3, # 'ิ' + 13: 2, # 'ี' + 40: 1, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 1, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 2, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 44: { # 'ถ' + 5: 1, # 'à¸' + 30: 0, # 'ข' + 24: 1, # 'ค' + 8: 0, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 2, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 1, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 1, # 'ร' + 61: 0, # 'ฤ' + 15: 1, # 'ล' + 12: 1, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 1, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 2, # 'ั' + 1: 
3, # 'า' + 36: 0, # 'ำ' + 23: 2, # 'ิ' + 13: 1, # 'ี' + 40: 3, # 'ึ' + 27: 2, # 'ื' + 32: 2, # 'ุ' + 35: 3, # 'ู' + 11: 1, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 1, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 2, # '่' + 7: 3, # '้' + 38: 0, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 14: { # 'ท' + 5: 1, # 'à¸' + 30: 1, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 2, # 'ด' + 19: 1, # 'ต' + 44: 0, # 'ถ' + 14: 1, # 'ท' + 48: 3, # 'ธ' + 3: 3, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 1, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 1, # 'ม' + 16: 3, # 'ย' + 2: 3, # 'ร' + 61: 1, # 'ฤ' + 15: 1, # 'ล' + 12: 2, # 'ว' + 42: 3, # 'ศ' + 46: 1, # 'ษ' + 18: 1, # 'ส' + 21: 0, # 'ห' + 4: 2, # 'อ' + 63: 0, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 2, # 'ิ' + 13: 3, # 'ี' + 40: 2, # 'ึ' + 27: 1, # 'ื' + 32: 3, # 'ุ' + 35: 1, # 'ู' + 11: 0, # 'เ' + 28: 1, # 'à¹' + 41: 0, # 'โ' + 29: 1, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 48: { # 'ธ' + 5: 0, # 'à¸' + 30: 0, # 'ข' + 24: 0, # 'ค' + 8: 1, # 'ง' + 26: 0, # 'จ' + 52: 0, # 'ฉ' + 34: 0, # 'ช' + 51: 0, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 0, # 'ด' + 19: 0, # 'ต' + 44: 0, # 'ถ' + 14: 0, # 'ท' + 48: 0, # 'ธ' + 3: 1, # 'น' + 17: 0, # 'บ' + 25: 0, # 'ป' + 39: 0, # 'ผ' + 62: 0, # 'à¸' + 31: 0, # 'พ' + 54: 0, # 'ฟ' + 45: 0, # 'ภ' + 9: 0, # 'ม' + 16: 0, # 'ย' + 2: 2, # 'ร' + 61: 0, # 'ฤ' + 15: 0, # 'ล' + 12: 0, # 'ว' + 42: 0, # 'ศ' + 46: 0, # 'ษ' + 18: 0, # 'ส' + 21: 0, # 'ห' + 4: 0, # 'อ' + 63: 0, # 'ฯ' + 22: 0, # 'ะ' + 10: 0, # 'ั' + 1: 2, # 'า' + 36: 0, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 0, # 'ึ' + 27: 0, # 'ื' + 32: 2, # 'ุ' + 35: 0, # 'ู' + 11: 0, # 'เ' + 28: 0, # 'à¹' + 41: 0, # 'โ' + 29: 0, # 'ใ' + 33: 0, # 'ไ' + 50: 0, # 'ๆ' + 37: 0, # '็' + 6: 0, # '่' + 7: 0, # '้' + 38: 3, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 3: { # 'น' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 3, # 'ค' + 8: 1, # 'ง' + 26: 2, # 'จ' + 52: 0, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 1, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 3, # 'ด' + 19: 3, # 'ต' + 44: 2, # 'ถ' + 14: 3, # 'ท' + 48: 3, # 'ธ' + 3: 2, # 'น' + 17: 2, # 'บ' + 25: 2, # 'ป' + 39: 2, # 'ผ' + 62: 0, # 'à¸' + 31: 2, # 'พ' + 54: 1, # 'ฟ' + 45: 1, # 'ภ' + 9: 2, # 'ม' + 16: 2, # 'ย' + 2: 2, # 'ร' + 61: 1, # 'ฤ' + 15: 2, # 'ล' + 12: 3, # 'ว' + 42: 1, # 'ศ' + 46: 0, # 'ษ' + 18: 2, # 'ส' + 21: 2, # 'ห' + 4: 3, # 'อ' + 63: 1, # 'ฯ' + 22: 2, # 'ะ' + 10: 3, # 'ั' + 1: 3, # 'า' + 36: 3, # 'ำ' + 23: 3, # 'ิ' + 13: 3, # 'ี' + 40: 3, # 'ึ' + 27: 3, # 'ื' + 32: 3, # 'ุ' + 35: 2, # 'ู' + 11: 3, # 'เ' + 28: 2, # 'à¹' + 41: 3, # 'โ' + 29: 3, # 'ใ' + 33: 3, # 'ไ' + 50: 2, # 'ๆ' + 37: 1, # '็' + 6: 3, # '่' + 7: 3, # '้' + 38: 2, # '์' + 56: 0, # '๑' + 59: 0, # '๒' + 60: 0, # '๕' + }, + 17: { # 'บ' + 5: 3, # 'à¸' + 30: 2, # 'ข' + 24: 2, # 'ค' + 8: 1, # 'ง' + 26: 1, # 'จ' + 52: 1, # 'ฉ' + 34: 1, # 'ช' + 51: 1, # 'ซ' + 47: 0, # 'à¸' + 58: 0, # 'ฎ' + 57: 0, # 'à¸' + 49: 0, # 'à¸' + 53: 0, # 'ฑ' + 55: 0, # 'ฒ' + 43: 0, # 'ณ' + 20: 1, # 'ด' + 19: 2, # 'ต' + 44: 1, # 'ถ' + 14: 3, # 'ท' + 48: 0, # 'ธ' + 3: 3, # 'น' + 17: 3, # 'บ' + 25: 2, # 'ป' 
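+ # Editor's note: the 0-3 grades follow the Negative/Unlikely/Likely/
+ # Positive legend given at the top of THAI_LANG_MODEL.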
[... remaining THAI_LANG_MODEL rows elided: one nested dict per character
order ('ป' through '๕'), each rating every following character order on the
same 0-3 scale; garbled comments such as 'à¸' and 'à¹' are mis-encoded Thai
letters ('ก', 'แ', etc.) whose identity is fixed by the byte-to-order table
below ...]
+}
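Each row of THAI_LANG_MODEL is a plain bigram table: THAI_LANG_MODEL[prev][cur] rates, on the 0 (Negative) to 3 (Positive) scale, how plausible it is for the character with frequency order cur to follow the one with order prev. Below is a minimal sketch of how such a table can score a candidate decoding; rate_text and its thresholds are illustrative assumptions, not chardet's actual prober code, which lives in pip._vendor.chardet.sbcharsetprober.

    # Hypothetical scorer over a chardet-style bigram table (illustration only).
    def rate_text(raw, char_to_order, lang_model):
        orders = [char_to_order.get(b, 255) for b in raw]
        # Orders >= 251 mark control bytes, digits, symbols, line breaks
        # and bytes unseen in training; keep only real letters.
        orders = [o for o in orders if o < 251]
        if len(orders) < 2:
            return 0.0
        positive = sum(
            1 for prev, cur in zip(orders, orders[1:])
            # Pairs missing from the table are treated as "Negative" (0).
            if lang_model.get(prev, {}).get(cur, 0) == 3
        )
        # Fraction of bigrams the model rates "Positive" (3).
        return positive / (len(orders) - 1)

A ratio close to the model's typical_positive_ratio (set on the model object further down) suggests the bytes really are TIS-620 Thai.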
+
+# 255: Undefined characters that did not exist in training text
+# 254: Carriage/Return
+# 253: symbol (punctuation) that does not belong to word
+# 252: 0 - 9
+# 251: Control characters
+
+# Character Mapping Table(s):
+TIS_620_THAI_CHAR_TO_ORDER = {
[... 256 byte-to-order entries elided: control bytes map to 255 (CR/LF to
254), ASCII punctuation to 253, digits to 252, and ASCII letters plus the
TIS-620 Thai byte range to their corpus frequency orders ...]
+}
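The byte-to-order stage is a flat 256-entry lookup. Here is a tiny self-contained illustration; the table literal is a hand-picked stand-in, though the four orders shown are taken verbatim from the real TIS_620_THAI_CHAR_TO_ORDER.

    # Stand-in slice of the byte-to-order table, showing the mechanics only.
    CHAR_TO_ORDER_SAMPLE = {
        0x20: 253,  # ' '  -> symbol class
        0x31: 252,  # '1'  -> digit class
        0xA1: 5,    # 'ก'  -> 6th most frequent letter in the corpus
        0xD2: 1,    # 'า'  -> 2nd most frequent letter
    }

    def to_orders(raw):
        # Bytes absent from the table fall back to 255 ("undefined").
        return [CHAR_TO_ORDER_SAMPLE.get(b, 255) for b in raw]

    print(to_orders(b"\xa1 1\xd2"))  # [5, 253, 252, 1]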
+
+TIS_620_THAI_MODEL = SingleByteCharSetModel(charset_name='TIS-620',
+                                            language='Thai',
+                                            char_to_order_map=TIS_620_THAI_CHAR_TO_ORDER,
+                                            language_model=THAI_LANG_MODEL,
+                                            typical_positive_ratio=0.926386,
+                                            keep_ascii_letters=False,
+                                            alphabet='กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛')
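End users normally reach these tables only through chardet's top-level API; the models are wired into the single-byte prober automatically. A short usage sketch against the vendored copy follows; the exact confidence value will vary, and very short samples may not be detected at all.

    from pip._vendor.chardet import detect

    # A short Thai sentence round-tripped through the TIS-620 codec.
    sample = 'สวัสดีครับ นี่คือข้อความภาษาไทย'.encode('tis-620')
    print(detect(sample))
    # e.g. {'encoding': 'TIS-620', 'confidence': 0.9..., 'language': 'Thai'}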

diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langturkishmodel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langturkishmodel.py
new file mode 100644
index 000000000..43f4230ae
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langturkishmodel.py
@@ -0,0 +1,4383 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
+
+
+# 3: Positive
+# 2: Likely
+# 1: Unlikely
+# 0: Negative
+
+TURKISH_LANG_MODEL = {
[... TURKISH_LANG_MODEL rows elided: one nested dict per character order for
'A' through 'V', with the same 0-3 following-character ratings; mis-encoded
comments 'ÄŸ', 'Åž' and 'ÅŸ' stand for the Turkish letters 'ğ', 'Ş' and 'ş' ...]
+    62: {  # 'W'
+        23: 0,  # 'A'
+        37: 0,  # 'B'
+        47: 0,  # 'C'
+        39: 0,  # 'D'
+        29: 0,  # 'E'
+        52: 0,  # 'F'
+        36: 0,  # 'G'
+        45: 0,  # 'H'
+        53: 0,  # 'I'
+        60: 0,  # 'J'
+        16: 0,  # 'K'
+        49: 0,  # 'L'
+        20: 0,  # 'M'
+        46: 0,  # 'N'
+        42: 0,  # 'O'
+        48: 0,  # 'P'
+        44: 0,  # 'R'
+        35: 0,  # 'S'
+        31: 0,  # 'T'
+        51: 0,  # 'U'
+        38: 0,  # 'V'
+        62: 0,  #
'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 43: { # 'Y' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 1, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 56: { # 'Z' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 1: { # 'a' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 21: { # 'b' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 
39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 2, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 28: { # 'c' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 3, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 1, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 12: { # 'd' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 2: { # 'e' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 2, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 
63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 18: { # 'f' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 1, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 1, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 27: { # 'g' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 25: { # 'h' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 3: { # 'i' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 
27: 3, # 'g' + 25: 1, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 1, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 1, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 24: { # 'j' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 2, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 10: { # 'k' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 5: { # 'l' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 1, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 13: { # 'm' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, 
# 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 2, # 'u' + 32: 2, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 4: { # 'n' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 3, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 15: { # 'o' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 2, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ÄŸ' + 41: 2, # 'İ' + 6: 3, # 'ı' + 40: 2, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 26: { # 'p' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 1, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, 
# 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 7: { # 'r' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 1, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 3, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 8: { # 's' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 2, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 9: { # 't' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 2, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 2, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 14: { # 'u' + 23: 3, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 3, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 2, # 'Z' + 1: 2, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 2, # 'e' + 18: 2, # 'f' + 27: 3, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 
26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 2, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 32: { # 'v' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 1, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 57: { # 'w' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 1, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 1, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 2, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 58: { # 'x' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 2, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 11: { # 'y' + 23: 1, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 
38: 0, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 2, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 3, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 22: { # 'z' + 23: 2, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 2, # 'D' + 29: 3, # 'E' + 52: 1, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 3, # 'T' + 51: 2, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 2, # 'e' + 18: 3, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 3, # 'y' + 22: 2, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 3, # 'ı' + 40: 1, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 63: { # '·' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 54: { # 'Ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 1, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 0, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 2, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 2, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 2, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 50: { # 'Ö' + 23: 0, # 'A' + 37: 0, 
# 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 2, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 1, # 'N' + 42: 2, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 1, # 'f' + 27: 1, # 'g' + 25: 1, # 'h' + 3: 2, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 2, # 'p' + 7: 3, # 'r' + 8: 1, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 1, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 2, # 'ü' + 30: 1, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 55: { # 'Ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 1, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 1, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 1, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 1, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 1, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 0, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 59: { # 'â' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 0, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 2, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 2, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 33: { # 'ç' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 3, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 0, # 'Z' + 1: 0, # 'a' + 21: 3, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 2, # 'f' + 27: 1, # 'g' + 25: 3, # 'h' + 3: 3, # 'i' + 24: 0, # 'j' + 10: 3, # 'k' + 5: 0, # 'l' + 13: 0, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 3, # 't' + 14: 0, # 'u' + 32: 2, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 
2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 1, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 61: { # 'î' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 0, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 0, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 2, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 0, # 'd' + 2: 2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 1, # 'j' + 10: 0, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 1, # 'n' + 15: 0, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 1, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 1, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 1, # 'î' + 34: 0, # 'ö' + 17: 0, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 1, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 34: { # 'ö' + 23: 0, # 'A' + 37: 1, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 1, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 1, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 2, # 'h' + 3: 1, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 2, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 0, # 'r' + 8: 3, # 's' + 9: 1, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 1, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 2, # 'ÄŸ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 17: { # 'ü' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 0, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 1, # 'J' + 16: 1, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 0, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 0, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 0, # 'c' + 12: 1, # 'd' + 2: 3, # 'e' + 18: 1, # 'f' + 27: 2, # 'g' + 25: 0, # 'h' + 3: 1, # 'i' + 24: 1, # 'j' + 10: 2, # 'k' + 5: 3, # 'l' + 13: 2, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 2, # 'p' + 7: 2, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 3, # 'u' + 32: 1, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 2, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 30: { # 'ÄŸ' + 23: 0, # 'A' + 37: 2, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 1, # 'M' + 46: 2, # 'N' + 42: 2, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 0, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 2, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 0, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 
2, # 'e' + 18: 0, # 'f' + 27: 0, # 'g' + 25: 0, # 'h' + 3: 0, # 'i' + 24: 3, # 'j' + 10: 1, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 1, # 'o' + 26: 0, # 'p' + 7: 1, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 2, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 2, # 'İ' + 6: 2, # 'ı' + 40: 2, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 41: { # 'İ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 2, # 'G' + 45: 2, # 'H' + 53: 0, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 0, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 0, # 'Z' + 1: 1, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 2, # 'd' + 2: 1, # 'e' + 18: 0, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 2, # 'i' + 24: 2, # 'j' + 10: 2, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 1, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 2, # 't' + 14: 0, # 'u' + 32: 0, # 'v' + 57: 1, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 1, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 1, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 1, # 'ÅŸ' + }, + 6: { # 'ı' + 23: 2, # 'A' + 37: 0, # 'B' + 47: 0, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 2, # 'J' + 16: 3, # 'K' + 49: 0, # 'L' + 20: 3, # 'M' + 46: 1, # 'N' + 42: 0, # 'O' + 48: 0, # 'P' + 44: 0, # 'R' + 35: 0, # 'S' + 31: 2, # 'T' + 51: 0, # 'U' + 38: 0, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 3, # 'a' + 21: 2, # 'b' + 28: 1, # 'c' + 12: 3, # 'd' + 2: 3, # 'e' + 18: 3, # 'f' + 27: 3, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 3, # 'j' + 10: 3, # 'k' + 5: 3, # 'l' + 13: 3, # 'm' + 4: 3, # 'n' + 15: 0, # 'o' + 26: 3, # 'p' + 7: 3, # 'r' + 8: 3, # 's' + 9: 3, # 't' + 14: 3, # 'u' + 32: 3, # 'v' + 57: 1, # 'w' + 58: 1, # 'x' + 11: 3, # 'y' + 22: 0, # 'z' + 63: 1, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 2, # 'ç' + 61: 0, # 'î' + 34: 0, # 'ö' + 17: 3, # 'ü' + 30: 0, # 'ÄŸ' + 41: 0, # 'İ' + 6: 3, # 'ı' + 40: 0, # 'Åž' + 19: 0, # 'ÅŸ' + }, + 40: { # 'Åž' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 1, # 'D' + 29: 1, # 'E' + 52: 0, # 'F' + 36: 1, # 'G' + 45: 2, # 'H' + 53: 1, # 'I' + 60: 0, # 'J' + 16: 0, # 'K' + 49: 0, # 'L' + 20: 2, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 2, # 'P' + 44: 2, # 'R' + 35: 1, # 'S' + 31: 1, # 'T' + 51: 0, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 2, # 'Y' + 56: 1, # 'Z' + 1: 0, # 'a' + 21: 2, # 'b' + 28: 0, # 'c' + 12: 2, # 'd' + 2: 0, # 'e' + 18: 3, # 'f' + 27: 0, # 'g' + 25: 2, # 'h' + 3: 3, # 'i' + 24: 2, # 'j' + 10: 1, # 'k' + 5: 0, # 'l' + 13: 1, # 'm' + 4: 3, # 'n' + 15: 2, # 'o' + 26: 0, # 'p' + 7: 3, # 'r' + 8: 2, # 's' + 9: 2, # 't' + 14: 1, # 'u' + 32: 3, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 2, # 'y' + 22: 0, # 'z' + 63: 0, # '·' + 54: 0, # 'Ç' + 50: 0, # 'Ö' + 55: 1, # 'Ü' + 59: 0, # 'â' + 33: 0, # 'ç' + 61: 0, # 'î' + 34: 2, # 'ö' + 17: 1, # 'ü' + 30: 2, # 'ÄŸ' + 41: 0, # 'İ' + 6: 2, # 'ı' + 40: 1, # 'Åž' + 19: 2, # 'ÅŸ' + }, + 19: { # 'ÅŸ' + 23: 0, # 'A' + 37: 0, # 'B' + 47: 1, # 'C' + 39: 0, # 'D' + 29: 0, # 'E' + 52: 2, # 'F' + 36: 1, # 'G' + 45: 0, # 'H' + 53: 0, # 'I' + 60: 0, # 
'J' + 16: 3, # 'K' + 49: 2, # 'L' + 20: 0, # 'M' + 46: 1, # 'N' + 42: 1, # 'O' + 48: 1, # 'P' + 44: 1, # 'R' + 35: 1, # 'S' + 31: 0, # 'T' + 51: 1, # 'U' + 38: 1, # 'V' + 62: 0, # 'W' + 43: 1, # 'Y' + 56: 0, # 'Z' + 1: 3, # 'a' + 21: 1, # 'b' + 28: 2, # 'c' + 12: 0, # 'd' + 2: 3, # 'e' + 18: 0, # 'f' + 27: 2, # 'g' + 25: 1, # 'h' + 3: 1, # 'i' + 24: 0, # 'j' + 10: 2, # 'k' + 5: 2, # 'l' + 13: 3, # 'm' + 4: 0, # 'n' + 15: 0, # 'o' + 26: 1, # 'p' + 7: 3, # 'r' + 8: 0, # 's' + 9: 0, # 't' + 14: 3, # 'u' + 32: 0, # 'v' + 57: 0, # 'w' + 58: 0, # 'x' + 11: 0, # 'y' + 22: 2, # 'z' + 63: 0, # '·' + 54: 1, # 'Ç' + 50: 2, # 'Ö' + 55: 0, # 'Ü' + 59: 0, # 'â' + 33: 1, # 'ç' + 61: 1, # 'î' + 34: 2, # 'ö' + 17: 0, # 'ü' + 30: 1, # 'ÄŸ' + 41: 1, # 'İ' + 6: 1, # 'ı' + 40: 1, # 'Åž' + 19: 1, # 'ÅŸ' + }, +} + +# 255: Undefined characters that did not exist in training text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 +# 251: Control characters + +# Character Mapping Table(s): +ISO_8859_9_TURKISH_CHAR_TO_ORDER = { + 0: 255, # '\x00' + 1: 255, # '\x01' + 2: 255, # '\x02' + 3: 255, # '\x03' + 4: 255, # '\x04' + 5: 255, # '\x05' + 6: 255, # '\x06' + 7: 255, # '\x07' + 8: 255, # '\x08' + 9: 255, # '\t' + 10: 255, # '\n' + 11: 255, # '\x0b' + 12: 255, # '\x0c' + 13: 255, # '\r' + 14: 255, # '\x0e' + 15: 255, # '\x0f' + 16: 255, # '\x10' + 17: 255, # '\x11' + 18: 255, # '\x12' + 19: 255, # '\x13' + 20: 255, # '\x14' + 21: 255, # '\x15' + 22: 255, # '\x16' + 23: 255, # '\x17' + 24: 255, # '\x18' + 25: 255, # '\x19' + 26: 255, # '\x1a' + 27: 255, # '\x1b' + 28: 255, # '\x1c' + 29: 255, # '\x1d' + 30: 255, # '\x1e' + 31: 255, # '\x1f' + 32: 255, # ' ' + 33: 255, # '!' + 34: 255, # '"' + 35: 255, # '#' + 36: 255, # '$' + 37: 255, # '%' + 38: 255, # '&' + 39: 255, # "'" + 40: 255, # '(' + 41: 255, # ')' + 42: 255, # '*' + 43: 255, # '+' + 44: 255, # ',' + 45: 255, # '-' + 46: 255, # '.' + 47: 255, # '/' + 48: 255, # '0' + 49: 255, # '1' + 50: 255, # '2' + 51: 255, # '3' + 52: 255, # '4' + 53: 255, # '5' + 54: 255, # '6' + 55: 255, # '7' + 56: 255, # '8' + 57: 255, # '9' + 58: 255, # ':' + 59: 255, # ';' + 60: 255, # '<' + 61: 255, # '=' + 62: 255, # '>' + 63: 255, # '?' 
+    64: 255,  # '@'
+    65: 23,  # 'A'
+    66: 37,  # 'B'
+    67: 47,  # 'C'
+    68: 39,  # 'D'
+    69: 29,  # 'E'
+    70: 52,  # 'F'
+    71: 36,  # 'G'
+    72: 45,  # 'H'
+    73: 53,  # 'I'
+    74: 60,  # 'J'
+    75: 16,  # 'K'
+    76: 49,  # 'L'
+    77: 20,  # 'M'
+    78: 46,  # 'N'
+    79: 42,  # 'O'
+    80: 48,  # 'P'
+    81: 69,  # 'Q'
+    82: 44,  # 'R'
+    83: 35,  # 'S'
+    84: 31,  # 'T'
+    85: 51,  # 'U'
+    86: 38,  # 'V'
+    87: 62,  # 'W'
+    88: 65,  # 'X'
+    89: 43,  # 'Y'
+    90: 56,  # 'Z'
+    91: 255,  # '['
+    92: 255,  # '\\'
+    93: 255,  # ']'
+    94: 255,  # '^'
+    95: 255,  # '_'
+    96: 255,  # '`'
+    97: 1,  # 'a'
+    98: 21,  # 'b'
+    99: 28,  # 'c'
+    100: 12,  # 'd'
+    101: 2,  # 'e'
+    102: 18,  # 'f'
+    103: 27,  # 'g'
+    104: 25,  # 'h'
+    105: 3,  # 'i'
+    106: 24,  # 'j'
+    107: 10,  # 'k'
+    108: 5,  # 'l'
+    109: 13,  # 'm'
+    110: 4,  # 'n'
+    111: 15,  # 'o'
+    112: 26,  # 'p'
+    113: 64,  # 'q'
+    114: 7,  # 'r'
+    115: 8,  # 's'
+    116: 9,  # 't'
+    117: 14,  # 'u'
+    118: 32,  # 'v'
+    119: 57,  # 'w'
+    120: 58,  # 'x'
+    121: 11,  # 'y'
+    122: 22,  # 'z'
+    123: 255,  # '{'
+    124: 255,  # '|'
+    125: 255,  # '}'
+    126: 255,  # '~'
+    127: 255,  # '\x7f'
+    128: 180,  # '\x80'
+    129: 179,  # '\x81'
+    130: 178,  # '\x82'
+    131: 177,  # '\x83'
+    132: 176,  # '\x84'
+    133: 175,  # '\x85'
+    134: 174,  # '\x86'
+    135: 173,  # '\x87'
+    136: 172,  # '\x88'
+    137: 171,  # '\x89'
+    138: 170,  # '\x8a'
+    139: 169,  # '\x8b'
+    140: 168,  # '\x8c'
+    141: 167,  # '\x8d'
+    142: 166,  # '\x8e'
+    143: 165,  # '\x8f'
+    144: 164,  # '\x90'
+    145: 163,  # '\x91'
+    146: 162,  # '\x92'
+    147: 161,  # '\x93'
+    148: 160,  # '\x94'
+    149: 159,  # '\x95'
+    150: 101,  # '\x96'
+    151: 158,  # '\x97'
+    152: 157,  # '\x98'
+    153: 156,  # '\x99'
+    154: 155,  # '\x9a'
+    155: 154,  # '\x9b'
+    156: 153,  # '\x9c'
+    157: 152,  # '\x9d'
+    158: 151,  # '\x9e'
+    159: 106,  # '\x9f'
+    160: 150,  # '\xa0'
+    161: 149,  # '¡'
+    162: 148,  # '¢'
+    163: 147,  # '£'
+    164: 146,  # '¤'
+    165: 145,  # '¥'
+    166: 144,  # '¦'
+    167: 100,  # '§'
+    168: 143,  # '¨'
+    169: 142,  # '©'
+    170: 141,  # 'ª'
+    171: 140,  # '«'
+    172: 139,  # '¬'
+    173: 138,  # '\xad'
+    174: 137,  # '®'
+    175: 136,  # '¯'
+    176: 94,  # '°'
+    177: 80,  # '±'
+    178: 93,  # '²'
+    179: 135,  # '³'
+    180: 105,  # '´'
+    181: 134,  # 'µ'
+    182: 133,  # '¶'
+    183: 63,  # '·'
+    184: 132,  # '¸'
+    185: 131,  # '¹'
+    186: 130,  # 'º'
+    187: 129,  # '»'
+    188: 128,  # '¼'
+    189: 127,  # '½'
+    190: 126,  # '¾'
+    191: 125,  # '¿'
+    192: 124,  # 'À'
+    193: 104,  # 'Á'
+    194: 73,  # 'Â'
+    195: 99,  # 'Ã'
+    196: 79,  # 'Ä'
+    197: 85,  # 'Å'
+    198: 123,  # 'Æ'
+    199: 54,  # 'Ç'
+    200: 122,  # 'È'
+    201: 98,  # 'É'
+    202: 92,  # 'Ê'
+    203: 121,  # 'Ë'
+    204: 120,  # 'Ì'
+    205: 91,  # 'Í'
+    206: 103,  # 'Î'
+    207: 119,  # 'Ï'
+    208: 68,  # 'Ğ'
+    209: 118,  # 'Ñ'
+    210: 117,  # 'Ò'
+    211: 97,  # 'Ó'
+    212: 116,  # 'Ô'
+    213: 115,  # 'Õ'
+    214: 50,  # 'Ö'
+    215: 90,  # '×'
+    216: 114,  # 'Ø'
+    217: 113,  # 'Ù'
+    218: 112,  # 'Ú'
+    219: 111,  # 'Û'
+    220: 55,  # 'Ü'
+    221: 41,  # 'İ'
+    222: 40,  # 'Ş'
+    223: 86,  # 'ß'
+    224: 89,  # 'à'
+    225: 70,  # 'á'
+    226: 59,  # 'â'
+    227: 78,  # 'ã'
+    228: 71,  # 'ä'
+    229: 82,  # 'å'
+    230: 88,  # 'æ'
+    231: 33,  # 'ç'
+    232: 77,  # 'è'
+    233: 66,  # 'é'
+    234: 84,  # 'ê'
+    235: 83,  # 'ë'
+    236: 110,  # 'ì'
+    237: 75,  # 'í'
+    238: 61,  # 'î'
+    239: 96,  # 'ï'
+    240: 30,  # 'ğ'
+    241: 67,  # 'ñ'
+    242: 109,  # 'ò'
+    243: 74,  # 'ó'
+    244: 87,  # 'ô'
+    245: 102,  # 'õ'
+    246: 34,  # 'ö'
+    247: 95,  # '÷'
+    248: 81,  # 'ø'
+    249: 108,  # 'ù'
+    250: 76,  # 'ú'
+    251: 72,  # 'û'
+    252: 17,  # 'ü'
+    253: 6,  # 'ı'
+    254: 19,  # 'ş'
+    255: 107,  # 'ÿ'
+}
+
+ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(charset_name='ISO-8859-9',
+                                                  language='Turkish',
+                                                  char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER,
+                                                  language_model=TURKISH_LANG_MODEL,
+                                                  typical_positive_ratio=0.97029,
+                                                  keep_ascii_letters=True,
+                                                  alphabet='ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş')
+
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/latin1prober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/latin1prober.py
new file mode 100644
index 000000000..7d1e8c20f
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/latin1prober.py
@@ -0,0 +1,145 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+FREQ_CAT_NUM = 4
+
+UDF = 0  # undefined
+OTH = 1  # other
+ASC = 2  # ascii capital letter
+ASS = 3  # ascii small letter
+ACV = 4  # accent capital vowel
+ACO = 5  # accent capital other
+ASV = 6  # accent small vowel
+ASO = 7  # accent small other
+CLASS_NUM = 8  # total classes
+
+Latin1_CharToClass = (
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 00 - 07
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 08 - 0F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 10 - 17
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 18 - 1F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 20 - 27
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 28 - 2F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 30 - 37
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 38 - 3F
+    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 40 - 47
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 48 - 4F
+    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,  # 50 - 57
+    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,  # 58 - 5F
+    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 60 - 67
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 68 - 6F
+    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,  # 70 - 77
+    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,  # 78 - 7F
+    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,  # 80 - 87
+    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,  # 88 - 8F
+    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # 90 - 97
+    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,  # 98 - 9F
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A0 - A7
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # A8 - AF
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B0 - B7
+    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,  # B8 - BF
+    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,  # C0 - C7
+    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,  # C8 - CF
+    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,  # D0 - D7
+    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,  # D8 - DF
+    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,  # E0 - E7
+    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,  # E8 - EF
+    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,  # F0 - F7
+    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,  # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+Latin1ClassModel = (
+# UDF OTH ASC ASS ACV ACO ASV ASO
+    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
+    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
+    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
+    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
+    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
+    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
+    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
+    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
+)
+
+
+class Latin1Prober(CharSetProber):
+    def __init__(self):
+        super(Latin1Prober, self).__init__()
+        self._last_char_class = None
+        self._freq_counter = None
+        self.reset()
+
+    def reset(self):
+        self._last_char_class = OTH
+        self._freq_counter = [0] * FREQ_CAT_NUM
+        CharSetProber.reset(self)
+
+    @property
+    def charset_name(self):
+        return "ISO-8859-1"
+
+    @property
+    def language(self):
+        return ""
+
+    def feed(self, byte_str):
+        byte_str = self.filter_with_english_letters(byte_str)
+        for c in byte_str:
+            char_class = Latin1_CharToClass[c]
+            freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM)
+                                    + char_class]
+            if freq == 0:
+                self._state = ProbingState.NOT_ME
+                break
+            self._freq_counter[freq] += 1
+            self._last_char_class = char_class
+
+        return self.state
+
+    def get_confidence(self):
+        if self.state == ProbingState.NOT_ME:
+            return 0.01
+
+        total = sum(self._freq_counter)
+        if total < 0.01:
+            confidence = 0.0
+        else:
+            confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
+                          / total)
+        if confidence < 0.0:
+            confidence = 0.0
+        # lower the confidence of latin1 so that other more accurate
+        # detector can take priority.
+        confidence = confidence * 0.73
+        return confidence
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcharsetprober.py
new file mode 100644
index 000000000..6256ecfd1
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcharsetprober.py
@@ -0,0 +1,91 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#   Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301  USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
+
+
+class MultiByteCharSetProber(CharSetProber):
+    """
+    MultiByteCharSetProber
+    """
+
+    def __init__(self, lang_filter=None):
+        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
+        self.distribution_analyzer = None
+        self.coding_sm = None
+        self._last_char = [0, 0]
+
+    def reset(self):
+        super(MultiByteCharSetProber, self).reset()
+        if self.coding_sm:
+            self.coding_sm.reset()
+        if self.distribution_analyzer:
+            self.distribution_analyzer.reset()
+        self._last_char = [0, 0]
+
+    @property
+    def charset_name(self):
+        raise NotImplementedError
+
+    @property
+    def language(self):
+        raise NotImplementedError
+
+    def feed(self, byte_str):
+        for i in range(len(byte_str)):
+            coding_state = self.coding_sm.next_state(byte_str[i])
+            if coding_state == MachineState.ERROR:
+                self.logger.debug('%s %s prober hit error at byte %s',
+                                  self.charset_name, self.language, i)
+                self._state = ProbingState.NOT_ME
+                break
+            elif coding_state == MachineState.ITS_ME:
+                self._state = ProbingState.FOUND_IT
+                break
+            elif coding_state == MachineState.START:
+                char_len = self.coding_sm.get_current_charlen()
+                if i == 0:
+                    self._last_char[1] = byte_str[0]
+                    self.distribution_analyzer.feed(self._last_char, char_len)
+                else:
+                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+                                                    char_len)
+
+        self._last_char[0] = byte_str[-1]
+
+        if self.state == ProbingState.DETECTING:
+            if (self.distribution_analyzer.got_enough_data() and
+                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+                self._state = ProbingState.FOUND_IT
+
+        return self.state
+
+    def get_confidence(self):
+        return self.distribution_analyzer.get_confidence()
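MultiByteCharSetProber.feed() above only does anything once a subclass assigns both coding_sm and distribution_analyzer. A hedged sketch of how a concrete prober such as Big5Prober (imported by the group prober below) presumably wires the two together; the CodingStateMachine and Big5DistributionAnalysis names follow chardet's usual module layout and are inferred, not shown in this diff.

# Sketch of a concrete multi-byte prober, assuming chardet's usual layout;
# the imported module paths are not part of this diff.
from .chardistribution import Big5DistributionAnalysis
from .codingstatemachine import CodingStateMachine
from .mbcharsetprober import MultiByteCharSetProber
from .mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(Big5Prober, self).__init__()
        # walks MachineState transitions byte by byte (the tables below)
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        # scores completed two-byte sequences against frequency tables
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"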
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcssm.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcssm.py new file mode 100644 index 000000000..8360d0f28 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # 
a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 
6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 
0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..bee5b4194 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-310.pyc new file mode 100644 index 000000000..3e181abe5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/__pycache__/languages.cpython-310.pyc differ diff --git 
a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/languages.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/languages.py new file mode 100644 index 000000000..3237d5abf --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/metadata/languages.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" +Metadata about languages used by our model training code for our +SingleByteCharSetProbers. Could be used for other things in the future. + +This code is based on the language metadata from the uchardet project. +""" +from __future__ import absolute_import, print_function + +from string import ascii_letters + + +# TODO: Add Ukranian (KOI8-U) + +class Language(object): + """Metadata about a language useful for training models + + :ivar name: The human name for the language, in English. + :type name: str + :ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise, + or use another catalog as a last resort. + :type iso_code: str + :ivar use_ascii: Whether or not ASCII letters should be included in trained + models. + :type use_ascii: bool + :ivar charsets: The charsets we want to support and create data for. + :type charsets: list of str + :ivar alphabet: The characters in the language's alphabet. If `use_ascii` is + `True`, you only need to add those not in the ASCII set. + :type alphabet: str + :ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling + Wikipedia for training data. + :type wiki_start_pages: list of str + """ + def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None, + alphabet=None, wiki_start_pages=None): + super(Language, self).__init__() + self.name = name + self.iso_code = iso_code + self.use_ascii = use_ascii + self.charsets = charsets + if self.use_ascii: + if alphabet: + alphabet += ascii_letters + else: + alphabet = ascii_letters + elif not alphabet: + raise ValueError('Must supply alphabet if use_ascii is False') + self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None + self.wiki_start_pages = wiki_start_pages + + def __repr__(self): + return '{}({})'.format(self.__class__.__name__, + ', '.join('{}={!r}'.format(k, v) + for k, v in self.__dict__.items() + if not k.startswith('_'))) + + +LANGUAGES = {'Arabic': Language(name='Arabic', + iso_code='ar', + use_ascii=False, + # We only support encodings that use isolated + # forms, because the current recommendation is + # that the rendering system handles presentation + # forms. This means we purposefully skip IBM864. 
+                                charsets=['ISO-8859-6', 'WINDOWS-1256',
+                                          'CP720', 'CP864'],
+                                alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ',
+                                wiki_start_pages=[u'الصفحة_الرئيسية']),
+             'Belarusian': Language(name='Belarusian',
+                                    iso_code='be',
+                                    use_ascii=False,
+                                    charsets=['ISO-8859-5', 'WINDOWS-1251',
+                                              'IBM866', 'MacCyrillic'],
+                                    alphabet=(u'АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯ'
+                                              u'абвгдеёжзійклмнопрстуўфхцчшыьэюяʼ'),
+                                    wiki_start_pages=[u'Галоўная_старонка']),
+             'Bulgarian': Language(name='Bulgarian',
+                                   iso_code='bg',
+                                   use_ascii=False,
+                                   charsets=['ISO-8859-5', 'WINDOWS-1251',
+                                             'IBM855'],
+                                   alphabet=(u'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯ'
+                                             u'абвгдежзийклмнопрстуфхцчшщъьюя'),
+                                   wiki_start_pages=[u'Начална_страница']),
+             'Czech': Language(name='Czech',
+                               iso_code='cz',
+                               use_ascii=True,
+                               charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                               alphabet=u'áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ',
+                               wiki_start_pages=[u'Hlavní_strana']),
+             'Danish': Language(name='Danish',
+                                iso_code='da',
+                                use_ascii=True,
+                                charsets=['ISO-8859-1', 'ISO-8859-15',
+                                          'WINDOWS-1252'],
+                                alphabet=u'æøåÆØÅ',
+                                wiki_start_pages=[u'Forside']),
+             'German': Language(name='German',
+                                iso_code='de',
+                                use_ascii=True,
+                                charsets=['ISO-8859-1', 'WINDOWS-1252'],
+                                alphabet=u'äöüßÄÖÜ',
+                                wiki_start_pages=[u'Wikipedia:Hauptseite']),
+             'Greek': Language(name='Greek',
+                               iso_code='el',
+                               use_ascii=False,
+                               charsets=['ISO-8859-7', 'WINDOWS-1253'],
+                               alphabet=(u'αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώ'
+                                         u'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ'),
+                               wiki_start_pages=[u'Πύλη:Κύρια']),
+             'English': Language(name='English',
+                                 iso_code='en',
+                                 use_ascii=True,
+                                 charsets=['ISO-8859-1', 'WINDOWS-1252'],
+                                 wiki_start_pages=[u'Main_Page']),
+             'Esperanto': Language(name='Esperanto',
+                                   iso_code='eo',
+                                   # Q, W, X, and Y not used at all
+                                   use_ascii=False,
+                                   charsets=['ISO-8859-3'],
+                                   alphabet=(u'abcĉdefgĝhĥijĵklmnoprsŝtuŭvz'
+                                             u'ABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ'),
+                                   wiki_start_pages=[u'Vikipedio:Ĉefpaĝo']),
+             'Spanish': Language(name='Spanish',
+                                 iso_code='es',
+                                 use_ascii=True,
+                                 charsets=['ISO-8859-1', 'ISO-8859-15',
+                                           'WINDOWS-1252'],
+                                 alphabet=u'ñáéíóúüÑÁÉÍÓÚÜ',
+                                 wiki_start_pages=[u'Wikipedia:Portada']),
+             'Estonian': Language(name='Estonian',
+                                  iso_code='et',
+                                  use_ascii=False,
+                                  charsets=['ISO-8859-4', 'ISO-8859-13',
+                                            'WINDOWS-1257'],
+                                  # C, F, Š, Q, W, X, Y, Z, Ž are only for
+                                  # loanwords
+                                  alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ'
+                                            u'abdeghijklmnoprstuvõäöü'),
+                                  wiki_start_pages=[u'Esileht']),
+             'Finnish': Language(name='Finnish',
+                                 iso_code='fi',
+                                 use_ascii=True,
+                                 charsets=['ISO-8859-1', 'ISO-8859-15',
+                                           'WINDOWS-1252'],
+                                 alphabet=u'ÅÄÖŠŽåäöšž',
+                                 wiki_start_pages=[u'Wikipedia:Etusivu']),
+             'French': Language(name='French',
+                                iso_code='fr',
+                                use_ascii=True,
+                                charsets=['ISO-8859-1', 'ISO-8859-15',
+                                          'WINDOWS-1252'],
+                                alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ',
+                                wiki_start_pages=[u'Wikipédia:Accueil_principal',
+                                                  u'Bœuf (animal)']),
+             'Hebrew': Language(name='Hebrew',
+                                iso_code='he',
+                                use_ascii=False,
+                                charsets=['ISO-8859-8', 'WINDOWS-1255'],
+                                alphabet=u'אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ',
+                                wiki_start_pages=[u'עמוד_ראשי']),
+             'Croatian': Language(name='Croatian',
+                                  iso_code='hr',
+                                  # Q, W, X, Y are only used for foreign words.
+                                  use_ascii=False,
+                                  charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                  alphabet=(u'abcčćdđefghijklmnoprsštuvzž'
+                                            u'ABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ'),
+                                  wiki_start_pages=[u'Glavna_stranica']),
+             'Hungarian': Language(name='Hungarian',
+                                   iso_code='hu',
+                                   # Q, W, X, Y are only used for foreign words.
+                                   use_ascii=False,
+                                   charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                   alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű'
+                                             u'ABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ'),
+                                   wiki_start_pages=[u'Kezdőlap']),
+             'Italian': Language(name='Italian',
+                                 iso_code='it',
+                                 use_ascii=True,
+                                 charsets=['ISO-8859-1', 'ISO-8859-15',
+                                           'WINDOWS-1252'],
+                                 alphabet=u'ÀÈÉÌÒÓÙàèéìòóù',
+                                 wiki_start_pages=[u'Pagina_principale']),
+             'Lithuanian': Language(name='Lithuanian',
+                                    iso_code='lt',
+                                    use_ascii=False,
+                                    charsets=['ISO-8859-13', 'WINDOWS-1257',
+                                              'ISO-8859-4'],
+                                    # Q, W, and X not used at all
+                                    alphabet=(u'AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽ'
+                                              u'aąbcčdeęėfghiįyjklmnoprsštuųūvzž'),
+                                    wiki_start_pages=[u'Pagrindinis_puslapis']),
+             'Latvian': Language(name='Latvian',
+                                 iso_code='lv',
+                                 use_ascii=False,
+                                 charsets=['ISO-8859-13', 'WINDOWS-1257',
+                                           'ISO-8859-4'],
+                                 # Q, W, X, Y are only for loanwords
+                                 alphabet=(u'AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽ'
+                                           u'aābcčdeēfgģhiījkķlļmnņoprsštuūvzž'),
+                                 wiki_start_pages=[u'Sākumlapa']),
+             'Macedonian': Language(name='Macedonian',
+                                    iso_code='mk',
+                                    use_ascii=False,
+                                    charsets=['ISO-8859-5', 'WINDOWS-1251',
+                                              'MacCyrillic', 'IBM855'],
+                                    alphabet=(u'АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШ'
+                                              u'абвгдѓежзѕијклљмнњопрстќуфхцчџш'),
+                                    wiki_start_pages=[u'Главна_страница']),
+             'Dutch': Language(name='Dutch',
+                               iso_code='nl',
+                               use_ascii=True,
+                               charsets=['ISO-8859-1', 'WINDOWS-1252'],
+                               wiki_start_pages=[u'Hoofdpagina']),
+             'Polish': Language(name='Polish',
+                                iso_code='pl',
+                                # Q and X are only used for foreign words.
+                                use_ascii=False,
+                                charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                alphabet=(u'AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻ'
+                                          u'aąbcćdeęfghijklłmnńoóprsśtuwyzźż'),
+                                wiki_start_pages=[u'Wikipedia:Strona_główna']),
+             'Portuguese': Language(name='Portuguese',
+                                    iso_code='pt',
+                                    use_ascii=True,
+                                    charsets=['ISO-8859-1', 'ISO-8859-15',
+                                              'WINDOWS-1252'],
+                                    alphabet=u'ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú',
+                                    wiki_start_pages=[u'Wikipédia:Página_principal']),
+             'Romanian': Language(name='Romanian',
+                                  iso_code='ro',
+                                  use_ascii=True,
+                                  charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                  alphabet=u'ăâîșțĂÂÎȘȚ',
+                                  wiki_start_pages=[u'Pagina_principală']),
+             'Russian': Language(name='Russian',
+                                 iso_code='ru',
+                                 use_ascii=False,
+                                 charsets=['ISO-8859-5', 'WINDOWS-1251',
+                                           'KOI8-R', 'MacCyrillic', 'IBM866',
+                                           'IBM855'],
+                                 alphabet=(u'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
+                                           u'АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'),
+                                 wiki_start_pages=[u'Заглавная_страница']),
+             'Slovak': Language(name='Slovak',
+                                iso_code='sk',
+                                use_ascii=True,
+                                charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                alphabet=u'áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ',
+                                wiki_start_pages=[u'Hlavná_stránka']),
+             'Slovene': Language(name='Slovene',
+                                 iso_code='sl',
+                                 # Q, W, X, Y are only used for foreign words.
+                                 use_ascii=False,
+                                 charsets=['ISO-8859-2', 'WINDOWS-1250'],
+                                 alphabet=(u'abcčdefghijklmnoprsštuvzž'
+                                           u'ABCČDEFGHIJKLMNOPRSŠTUVZŽ'),
+                                 wiki_start_pages=[u'Glavna_stran']),
+             # Serbian can be written in both Latin and Cyrillic, but there's no
+             # simple way to get the Latin alphabet pages from Wikipedia through
+             # the API, so for now we just support Cyrillic.
+             'Serbian': Language(name='Serbian',
+                                 iso_code='sr',
+                                 alphabet=(u'АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШ'
+                                           u'абвгдђежзијклљмнњопрстћуфхцчџш'),
+                                 charsets=['ISO-8859-5', 'WINDOWS-1251',
+                                           'MacCyrillic', 'IBM855'],
+                                 wiki_start_pages=[u'Главна_страна']),
+             'Thai': Language(name='Thai',
+                              iso_code='th',
+                              use_ascii=False,
+                              charsets=['ISO-8859-11', 'TIS-620', 'CP874'],
+                              alphabet=u'กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛',
+                              wiki_start_pages=[u'หน้าหลัก']),
+             'Turkish': Language(name='Turkish',
+                                 iso_code='tr',
+                                 # Q, W, and X are not used by Turkish
+                                 use_ascii=False,
+                                 charsets=['ISO-8859-3', 'ISO-8859-9',
+                                           'WINDOWS-1254'],
+                                 alphabet=(u'abcçdefgğhıijklmnoöprsştuüvyzâîû'
+                                           u'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ'),
+                                 wiki_start_pages=[u'Ana_Sayfa']),
+             'Vietnamese': Language(name='Vietnamese',
+                                    iso_code='vi',
+                                    use_ascii=False,
+                                    # Windows-1258 is the only common 8-bit
+                                    # Vietnamese encoding supported by Python.
+                                    # From Wikipedia:
+                                    # For systems that lack support for Unicode,
+                                    # dozens of 8-bit Vietnamese code pages are
+                                    # available.[1] The most common are VISCII
+                                    # (TCVN 5712:1993), VPS, and Windows-1258.[3]
+                                    # Where ASCII is required, such as when
+                                    # ensuring readability in plain text e-mail,
+                                    # Vietnamese letters are often encoded
+                                    # according to Vietnamese Quoted-Readable
+                                    # (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4]
+                                    # though usage of either variable-width
+                                    # scheme has declined dramatically following
+                                    # the adoption of Unicode on the World Wide
+                                    # Web.
+                                    charsets=['WINDOWS-1258'],
+                                    alphabet=(u'aăâbcdđeêghiklmnoôơpqrstuưvxy'
+                                              u'AĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY'),
+                                    wiki_start_pages=[u'Chữ_Quốc_ngữ']),
+             }
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcharsetprober.py
new file mode 100644
index 000000000..46ba835c6
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcharsetprober.py
@@ -0,0 +1,145 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#   Mark Pilgrim - port to Python
+#   Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from collections import namedtuple + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +SingleByteCharSetModel = namedtuple('SingleByteCharSetModel', + ['charset_name', + 'language', + 'char_to_order_map', + 'language_model', + 'typical_positive_ratio', + 'keep_ascii_letters', + 'alphabet']) + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model.charset_name + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.language + + def feed(self, byte_str): + # TODO: Make filter_international_words keep things in self.alphabet + if not self._model.keep_ascii_letters: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model.char_to_order_map + language_model = self._model.language_model + for char in byte_str: + order = char_to_order_map.get(char, CharacterCategory.UNDEFINED) + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. + if order < CharacterCategory.CONTROL: + self._total_char += 1 + # TODO: Follow uchardet's lead and discount confidence for frequent + # control characters. 
+ # See https://github.com/BYVoid/uchardet/commit/55b4f23971db61 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + lm_cat = language_model[self._last_order][order] + else: + lm_cat = language_model[order][self._last_order] + self._seq_counters[lm_cat] += 1 + self._last_order = order + + charset_name = self._model.charset_name + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model.typical_positive_ratio) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 000000000..bdeef4e15 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sbcsgroupprober.py @@ -0,0 +1,83 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .hebrewprober import HebrewProber +from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL, + WINDOWS_1251_BULGARIAN_MODEL) +from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL +from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL +# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL, +# WINDOWS_1250_HUNGARIAN_MODEL) +from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL, + ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL, + MACCYRILLIC_RUSSIAN_MODEL, + WINDOWS_1251_RUSSIAN_MODEL) +from .langthaimodel import TIS_620_THAI_MODEL +from .langturkishmodel import ISO_8859_9_TURKISH_MODEL +from .sbcharsetprober import SingleByteCharSetProber + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + False, hebrew_prober) + # TODO: See if using ISO-8859-8 Hebrew model works better here, since + # it's actually the visual one + visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL, + True, hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, + visual_hebrew_prober) + # TODO: ORDER MATTERS HERE. I changed the order vs what was in master + # and several tests failed that did not before. Some thought + # should be put into the ordering, and we should consider making + # order not matter here, because that is very counter-intuitive. + self.probers = [ + SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL), + SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL), + SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM866_RUSSIAN_MODEL), + SingleByteCharSetProber(IBM855_RUSSIAN_MODEL), + SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL), + SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL), + SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL), + SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL), + # SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL), + SingleByteCharSetProber(TIS_620_THAI_MODEL), + SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL), + hebrew_prober, + logical_hebrew_prober, + visual_hebrew_prober, + ] + self.reset() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sjisprober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sjisprober.py new file mode 100644 index 000000000..9e29623bd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/universaldetector.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/universaldetector.py new file mode 100644 index 000000000..055a8ac1b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. 
+ + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. + """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. 
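+        # (Illustrative note, not part of the upstream chardet source: in
+        # windows-1251 Russian text the byte pair 0xF2 0xEE decodes to "то",
+        # one of the most frequent bigrams in Russian, while the same two
+        # bytes decode to the rare sequence "РН" in KOI8-R. Scoring observed
+        # byte bigrams against per-encoding frequency tables like this is
+        # roughly how the single-byte probers below tell apart otherwise
+        # similar 8-bit encodings.)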
+ elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. + """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() <= logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + group_prober.charset_name, + group_prober.language, + group_prober.get_confidence()) + return self.result diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/utf8prober.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/utf8prober.py new file mode 100644 index 000000000..6c3196cc2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/version.py b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/version.py new file mode 100644 index 000000000..70369b9d6 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "4.0.0" +VERSION = __version__.split('.') diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__init__.py new file mode 100644 index 000000000..b149ed79b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__init__.py @@ -0,0 +1,6 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
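+#
+# A minimal sketch of typical use of the names re-exported below:
+#
+#     from colorama import init, Fore, Style
+#     init()
+#     print(Fore.RED + 'red text' + Style.RESET_ALL)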
+from .initialise import init, deinit, reinit, colorama_text
+from .ansi import Fore, Back, Style, Cursor
+from .ansitowin32 import AnsiToWin32
+
+__version__ = '0.4.4'
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..5f54b6138
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc
new file mode 100644
index 000000000..e94c8fb55
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc
new file mode 100644
index 000000000..9e94ab7a1
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc
new file mode 100644
index 000000000..6c11b3dd1
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc
new file mode 100644
index 000000000..ba74b08db
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc
new file mode 100644
index 000000000..7ad817548
Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc differ
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansi.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansi.py
new file mode 100644
index 000000000..11ec695ff
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansi.py
@@ -0,0 +1,102 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+'''
+This module generates ANSI character codes for printing colors to terminals.
+See: http://en.wikipedia.org/wiki/ANSI_escape_code
+'''
+
+CSI = '\033['
+OSC = '\033]'
+BEL = '\a'
+
+
+def code_to_chars(code):
+    return CSI + str(code) + 'm'
+
+def set_title(title):
+    return OSC + '2;' + title + BEL
+
+def clear_screen(mode=2):
+    return CSI + str(mode) + 'J'
+
+def clear_line(mode=2):
+    return CSI + str(mode) + 'K'
+
+
+class AnsiCodes(object):
+    def __init__(self):
+        # the subclasses declare class attributes which are numbers.
+ # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansitowin32.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 000000000..6039a0543 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,258 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
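+        # (Name mangling stores these as _StreamWrapper__wrapped and
+        # _StreamWrapper__convertor, so they can never collide with
+        # attributes proxied from the wrapped stream via __getattr__.)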
+        self.__wrapped = wrapped
+        self.__convertor = converter
+
+    def __getattr__(self, name):
+        return getattr(self.__wrapped, name)
+
+    def __enter__(self, *args, **kwargs):
+        # special method lookup bypasses __getattr__/__getattribute__, see
+        # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
+        # thus, contextlib magic methods are not proxied via __getattr__
+        return self.__wrapped.__enter__(*args, **kwargs)
+
+    def __exit__(self, *args, **kwargs):
+        return self.__wrapped.__exit__(*args, **kwargs)
+
+    def write(self, text):
+        self.__convertor.write(text)
+
+    def isatty(self):
+        stream = self.__wrapped
+        if 'PYCHARM_HOSTED' in os.environ:
+            if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
+                return True
+        try:
+            stream_isatty = stream.isatty
+        except AttributeError:
+            return False
+        else:
+            return stream_isatty()
+
+    @property
+    def closed(self):
+        stream = self.__wrapped
+        try:
+            return stream.closed
+        except AttributeError:
+            return True
+
+
+class AnsiToWin32(object):
+    '''
+    Implements a 'write()' method which, on Windows, will strip ANSI character
+    sequences from the text, and if outputting to a tty, will convert them into
+    win32 function calls.
+    '''
+    ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?')   # Control Sequence Introducer
+    ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?')             # Operating System Command
+
+    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
+        # The wrapped stream (normally sys.stdout or sys.stderr)
+        self.wrapped = wrapped
+
+        # should we reset colors to defaults after every .write()
+        self.autoreset = autoreset
+
+        # create the proxy wrapping our output stream
+        self.stream = StreamWrapper(wrapped, self)
+
+        on_windows = os.name == 'nt'
+        # We test if the WinAPI works, because even if we are on Windows
+        # we may be using a terminal that doesn't support the WinAPI
+        # (e.g. Cygwin Terminal). In this case it's up to the terminal
+        # to support the ANSI codes.
+        conversion_supported = on_windows and winapi_test()
+
+        # should we strip ANSI sequences from our output?
+        if strip is None:
+            strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
+        self.strip = strip
+
+        # should we convert ANSI sequences into win32 calls?
+        if convert is None:
+            convert = conversion_supported and not self.stream.closed and self.stream.isatty()
+        self.convert = convert
+
+        # dict of ansi codes to win32 functions and parameters
+        self.win32_calls = self.get_win32_calls()
+
+        # are we wrapping stderr?
+        self.on_stderr = self.wrapped is sys.stderr
+
+    def should_wrap(self):
+        '''
+        True if this class is actually needed. If false, then the output
+        stream will not be affected, nor will win32 calls be issued, so
+        wrapping stdout is not actually required.
This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not self.stream.closed: + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. 
+ ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/initialise.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/initialise.py new file mode 100644 index 000000000..430d06687 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/initialise.py @@ -0,0 +1,80 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/win32.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/win32.py new file mode 100644 index 000000000..c2d836033 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/win32.py @@ -0,0 +1,152 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
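+# (STDOUT and STDERR below are the identifiers passed to GetStdHandle();
+# in the Windows headers they are STD_OUTPUT_HANDLE and STD_ERROR_HANDLE.)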
+ +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW + _SetConsoleTitleW.argtypes = [ + wintypes.LPCWSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + def _winapi_test(handle): + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def winapi_test(): + return any(_winapi_test(h) for h in + (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = _GetStdHandle(stream_id) + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = _GetStdHandle(stream_id) + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
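+    #    For example, ANSI 'ESC[5;10H' (row 5, column 10) arrives here as
+    #    position=(5, 10) and becomes the zero-based COORD(X=9, Y=4) below.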
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = _GetStdHandle(stream_id) + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = _GetStdHandle(stream_id) + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = _GetStdHandle(stream_id) + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/winterm.py b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/winterm.py new file mode 100644 index 000000000..0fdb4ec4e --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/winterm.py @@ -0,0 +1,169 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. 
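+        # (get_attrs() below packs them back together; e.g. fore=RED (4),
+        # back=GREEN (2) and a BRIGHT light bit (8) give 4 + 2*16 + 8 == 0x2C.)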
+ self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + self._light = 0 + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+        # 2 should clear the entire screen, and move cursor to (1,1)
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        # get the number of character cells in the current buffer
+        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
+        # get number of character cells before current cursor position
+        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = cells_in_screen - cells_before_cursor
+        elif mode == 1:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_before_cursor
+        elif mode == 2:
+            from_coord = win32.COORD(0, 0)
+            cells_to_erase = cells_in_screen
+        else:
+            # invalid mode
+            return
+        # fill the region to be erased with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+        if mode == 2:
+            # put the cursor where needed
+            win32.SetConsoleCursorPosition(handle, (1, 1))
+
+    def erase_line(self, mode=0, on_stderr=False):
+        # 0 should clear from the cursor to the end of the line.
+        # 1 should clear from the cursor to the beginning of the line.
+        # 2 should clear the entire line.
+        handle = win32.STDOUT
+        if on_stderr:
+            handle = win32.STDERR
+        csbi = win32.GetConsoleScreenBufferInfo(handle)
+        if mode == 0:
+            from_coord = csbi.dwCursorPosition
+            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
+        elif mode == 1:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwCursorPosition.X
+        elif mode == 2:
+            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+            cells_to_erase = csbi.dwSize.X
+        else:
+            # invalid mode
+            return
+        # fill the region of the line to be erased with blanks
+        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+        # now set the buffer's attributes accordingly
+        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+
+    def set_title(self, title):
+        win32.SetConsoleTitle(title)
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__init__.py
new file mode 100644
index 000000000..6878387a0
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2019 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
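+# (The NullHandler plumbing below follows the usual convention for library
+# logging: importing distlib attaches a do-nothing handler, with a local
+# fallback class for Pythons whose logging module lacks NullHandler.)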
+# +import logging + +__version__ = '0.3.4' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..7232f0263 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-310.pyc new file mode 100644 index 000000000..1a80fb6e2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-310.pyc new file mode 100644 index 000000000..27c3a4c54 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-310.pyc new file mode 100644 index 000000000..36c06b256 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-310.pyc new file mode 100644 index 000000000..a3949b008 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc new file mode 100644 index 000000000..fc7a79c37 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-310.pyc new file mode 100644 index 000000000..0fce70324 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-310.pyc new file mode 100644 index 000000000..9ed196be4 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-310.pyc 
b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-310.pyc new file mode 100644 index 000000000..98bd6b9ca Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc new file mode 100644 index 000000000..2f4048ffb Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-310.pyc new file mode 100644 index 000000000..be76f55bf Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-310.pyc new file mode 100644 index 000000000..1901ad677 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc new file mode 100644 index 000000000..83776beb1 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/compat.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 000000000..1fe3d225a --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1116 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib2 import HTTPSHandler + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + # Leaving this around for now, in case it needs resurrecting in some way + # _userprog = None + # def splituser(host): + # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + # global _userprog + # if _userprog is None: + # import re + # _userprog = re.compile('^(.*)@(.*)$') + + # match = _userprog.match(host) + # if match: return match.group(1, 2) + # return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPHandler, HTTPRedirectHandler, + build_opener) + if ssl: + from urllib.request import HTTPSHandler + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib + import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. 
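+    # e.g. '*.example.com' matches 'www.example.com' but not
+    # 'a.b.example.com' (the wildcard never spans a dot), and a partial
+    # wildcard like 'www*.example.com' also matches 'www1.example.com'.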
+ if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. 
./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: # pragma: no cover + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +import shutil +import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections.abc import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + # Issue #99: on some systems (e.g. containerised), + # sys.getfilesystemencoding() returns None, and we need a real value, + # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and + # sys.getfilesystemencoding(): the return value is "the user’s preference + # according to the result of nl_langinfo(CODESET), or None if the + # nl_langinfo(CODESET) failed." 
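+    # e.g. under the 'utf-8' fallback, fsencode(u'caf\xe9') == b'caf\xc3\xa9'
+    # and fsdecode(b'caf\xc3\xa9') == u'caf\xe9'; matching types pass through.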
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8' + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. 
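+                # (A PEP 263 cookie such as '# -*- coding: latin-1 -*-' is
+                # itself pure ASCII, so this decode cannot reject it.)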
+                line_string = line.decode('utf-8')
+            except UnicodeDecodeError:
+                msg = "invalid or missing encoding declaration"
+                if filename is not None:
+                    msg = '{} for {!r}'.format(msg, filename)
+                raise SyntaxError(msg)
+
+            matches = cookie_re.findall(line_string)
+            if not matches:
+                return None
+            encoding = _get_normal_name(matches[0])
+            try:
+                codec = lookup(encoding)
+            except LookupError:
+                # This behaviour mimics the Python interpreter
+                if filename is None:
+                    msg = "unknown encoding: " + encoding
+                else:
+                    msg = "unknown encoding for {!r}: {}".format(filename,
+                                                                 encoding)
+                raise SyntaxError(msg)
+
+            if bom_found:
+                if codec.name != 'utf-8':
+                    # This behaviour mimics the Python interpreter
+                    if filename is None:
+                        msg = 'encoding problem: utf-8'
+                    else:
+                        msg = 'encoding problem for {!r}: utf-8'.format(filename)
+                    raise SyntaxError(msg)
+                encoding += '-sig'
+            return encoding
+
+        first = read_or_stop()
+        if first.startswith(BOM_UTF8):
+            bom_found = True
+            first = first[3:]
+            default = 'utf-8-sig'
+        if not first:
+            return default, []
+
+        encoding = find_cookie(first)
+        if encoding:
+            return encoding, [first]
+
+        second = read_or_stop()
+        if not second:
+            return default, [first]
+
+        encoding = find_cookie(second)
+        if encoding:
+            return encoding, [first, second]
+
+        return default, [first, second]
+
+# For converting & <-> &amp; etc.
+try:
+    from html import escape
+except ImportError:
+    from cgi import escape
+if sys.version_info[:2] < (3, 4):
+    unescape = HTMLParser().unescape
+else:
+    from html import unescape
+
+try:
+    from collections import ChainMap
+except ImportError: # pragma: no cover
+    from collections import MutableMapping
+
+    try:
+        from reprlib import recursive_repr as _recursive_repr
+    except ImportError:
+        def _recursive_repr(fillvalue='...'):
+            '''
+            Decorator to make a repr function return fillvalue for a recursive
+            call
+            '''
+
+            def decorating_function(user_function):
+                repr_running = set()
+
+                def wrapper(self):
+                    key = id(self), get_ident()
+                    if key in repr_running:
+                        return fillvalue
+                    repr_running.add(key)
+                    try:
+                        result = user_function(self)
+                    finally:
+                        repr_running.discard(key)
+                    return result
+
+                # Can't use functools.wraps() here because of bootstrap issues
+                wrapper.__module__ = getattr(user_function, '__module__')
+                wrapper.__doc__ = getattr(user_function, '__doc__')
+                wrapper.__name__ = getattr(user_function, '__name__')
+                wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+                return wrapper
+
+            return decorating_function
+
+    class ChainMap(MutableMapping):
+        ''' A ChainMap groups multiple dicts (or other mappings) together
+        to create a single, updateable view.
+
+        The underlying mappings are stored in a list. That list is public and can
+        be accessed or updated using the *maps* attribute. There is no other state.
+
+        Lookups search the underlying mappings successively until a key is found.
+        In contrast, writes, updates, and deletions only operate on the first
+        mapping.
+
+        '''
+
+        def __init__(self, *maps):
+            '''Initialize a ChainMap by setting *maps* to the given mappings.
+            If no mappings are provided, a single empty dictionary is used.
+
+            '''
+            self.maps = list(maps) or [{}]          # always at least one map
+
+        def __missing__(self, key):
+            raise KeyError(key)
+
+        def __getitem__(self, key):
+            for mapping in self.maps:
+                try:
+                    return mapping[key]             # can't use 'key in mapping' with defaultdict
+                except KeyError:
+                    pass
+            return self.__missing__(key)            # support subclasses that define __missing__
+
+        def get(self, key, default=None):
+            return self[key] if key in self else default
+
+        def __len__(self):
+            return len(set().union(*self.maps))     # reuses stored hash values if possible
+
+        def __iter__(self):
+            return iter(set().union(*self.maps))
+
+        def __contains__(self, key):
+            return any(key in m for m in self.maps)
+
+        def __bool__(self):
+            return any(self.maps)
+
+        @_recursive_repr()
+        def __repr__(self):
+            return '{0.__class__.__name__}({1})'.format(
+                self, ', '.join(map(repr, self.maps)))
+
+        @classmethod
+        def fromkeys(cls, iterable, *args):
+            'Create a ChainMap with a single dict created from the iterable.'
+            return cls(dict.fromkeys(iterable, *args))
+
+        def copy(self):
+            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+            return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+        __copy__ = copy
+
+        def new_child(self):                        # like Django's Context.push()
+            'New ChainMap with a new dict followed by all previous maps.'
+            return self.__class__({}, *self.maps)
+
+        @property
+        def parents(self):                          # like Django's Context.pop()
+            'New ChainMap from maps[1:].'
+            return self.__class__(*self.maps[1:])
+
+        def __setitem__(self, key, value):
+            self.maps[0][key] = value
+
+        def __delitem__(self, key):
+            try:
+                del self.maps[0][key]
+            except KeyError:
+                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+        def popitem(self):
+            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+            try:
+                return self.maps[0].popitem()
+            except KeyError:
+                raise KeyError('No keys found in the first mapping.')
+
+        def pop(self, key, *args):
+            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+            try:
+                return self.maps[0].pop(key, *args)
+            except KeyError:
+                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+        def clear(self):
+            'Clear maps[0], leaving maps[1:] intact.'
+            self.maps[0].clear()
+
+try:
+    from importlib.util import cache_from_source   # Python >= 3.4
+except ImportError: # pragma: no cover
+    def cache_from_source(path, debug_override=None):
+        assert path.endswith('.py')
+        if debug_override is None:
+            debug_override = __debug__
+        if debug_override:
+            suffix = 'c'
+        else:
+            suffix = 'o'
+        return path + suffix
+
+try:
+    from collections import OrderedDict
+except ImportError: # pragma: no cover
+## {{{ http://code.activestate.com/recipes/576693/ (r9)
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+    try:
+        from thread import get_ident as _get_ident
+    except ImportError:
+        from dummy_thread import get_ident as _get_ident
+
+    try:
+        from _abcoll import KeysView, ValuesView, ItemsView
+    except ImportError:
+        pass
+
+
+    class OrderedDict(dict):
+        'Dictionary that remembers insertion order'
+        # An inherited dict maps keys to values.
+        # The inherited dict provides __getitem__, __len__, __contains__, and get.
+        # The remaining methods are order-aware.
+        # Big-O running times for all methods are the same as for regular dictionaries.
+ + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. + + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. + Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' + _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. 
+ + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. 
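+
+ String values of the form ``ext://dotted.path`` are resolved to the
+ named object via import, while ``cfg://path.to.value`` values are
+ looked up within the configuration itself (see ``value_converters``
+ below).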
+ """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' + frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/database.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/database.py new file mode 100644 index 000000000..f48699441 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/database.py @@ -0,0 +1,1345 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME) +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). 
+ """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. + """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + try: + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, + WHEEL_METADATA_FILENAME, + LEGACY_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + except Exception as e: + msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s' + logger.warning(msg, r.path, e) + import warnings + warnings.warn(msg % (r.path, e), stacklevel=2) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. + """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. 
Spaces
+ become dots, and all other non-alphanumeric characters
+ (except dots) become dashes, with runs of multiple
+ dashes condensed to a single dash.
+ :type version: string
+ :returns: directory name
+ :rtype: string"""
+ name = name.replace('-', '_')
+ return '-'.join([name, version]) + DISTINFO_EXT
+
+ def get_distributions(self):
+ """
+ Provides an iterator that looks for distributions and returns
+ :class:`InstalledDistribution` or
+ :class:`EggInfoDistribution` instances for each one of them.
+
+ :rtype: iterator of :class:`InstalledDistribution` and
+ :class:`EggInfoDistribution` instances
+ """
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ yield dist
+ else:
+ self._generate_cache()
+
+ for dist in self._cache.path.values():
+ yield dist
+
+ if self._include_egg:
+ for dist in self._cache_egg.path.values():
+ yield dist
+
+ def get_distribution(self, name):
+ """
+ Looks for a named distribution on the path.
+
+ This function only returns the first result found, as no more than one
+ value is expected. If nothing is found, ``None`` is returned.
+
+ :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
+ or ``None``
+ """
+ result = None
+ name = name.lower()
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ if dist.key == name:
+ result = dist
+ break
+ else:
+ self._generate_cache()
+
+ if name in self._cache.name:
+ result = self._cache.name[name][0]
+ elif self._include_egg and name in self._cache_egg.name:
+ result = self._cache_egg.name[name][0]
+ return result
+
+ def provides_distribution(self, name, version=None):
+ """
+ Iterates over all distributions to find which distributions provide *name*.
+ If a *version* is provided, it will be used to filter the results.
+
+ This function is a generator: it yields every distribution found that
+ provides a matching *name* (and, when given, a matching *version*).
+
+ :parameter version: a version specifier that indicates the version
+ required, conforming to the format in ``PEP-345``
+
+ :type name: string
+ :type version: string
+ """
+ matcher = None
+ if version is not None:
+ try:
+ matcher = self._scheme.matcher('%s (%s)' % (name, version))
+ except ValueError:
+ raise DistlibException('invalid name or version: %r, %r' %
+ (name, version))
+
+ for dist in self.get_distributions():
+ # We hit a problem on Travis where enum34 was installed and doesn't
+ # have a provides attribute ...
+ if not hasattr(dist, 'provides'):
+ logger.debug('No "provides": %s', dist)
+ else:
+ provided = dist.provides
+
+ for p in provided:
+ p_name, p_ver = parse_name_and_version(p)
+ if matcher is None:
+ if p_name == name:
+ yield dist
+ break
+ else:
+ if p_name == name and matcher.match(p_ver):
+ yield dist
+ break
+
+ def get_file_path(self, name, relative_path):
+ """
+ Return the path to a resource file.
+ """
+ dist = self.get_distribution(name)
+ if dist is None:
+ raise LookupError('no distribution named %r found' % name)
+ return dist.get_resource_path(relative_path)
+
+ def get_exported_entries(self, category, name=None):
+ """
+ Return all of the exported entries in a particular category.
+
+ :param category: The category to search for entries.
+ :param name: If specified, only entries with that name are returned.
+ """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. + """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. 
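+
+ For example, a distribution whose ``provides`` list contains
+ ``'foo (1.2)'`` matches the requirement ``'foo (>= 1.0)'``.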
+
+ """
+ # Requirement may contain extras - parse to lose those
+ # from what's passed to the matcher
+ r = parse_requirement(req)
+ scheme = get_scheme(self.metadata.scheme)
+ try:
+ matcher = scheme.matcher(r.requirement)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only',
+ req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ result = False
+ for p in self.provides:
+ p_name, p_ver = parse_name_and_version(p)
+ if p_name != name:
+ continue
+ try:
+ result = matcher.match(p_ver)
+ break
+ except UnsupportedVersionError:
+ pass
+ return result
+
+ def __repr__(self):
+ """
+ Return a textual representation of this instance.
+ """
+ if self.source_url:
+ suffix = ' [%s]' % self.source_url
+ else:
+ suffix = ''
+ return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+ def __eq__(self, other):
+ """
+ See if this distribution is the same as another.
+ :param other: The distribution to compare with. To be equal to one
+ another, distributions must have the same type, name,
+ version and source_url.
+ :return: True if it is the same, else False.
+ """
+ if type(other) is not type(self):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.version == other.version and
+ self.source_url == other.source_url)
+ return result
+
+ def __hash__(self):
+ """
+ Compute hash in a way which matches the equality test.
+ """
+ return hash(self.name) + hash(self.version) + hash(self.source_url)
+
+
+class BaseInstalledDistribution(Distribution):
+ """
+ This is the base class for installed distributions (whether PEP 376 or
+ legacy).
+ """
+
+ hasher = None
+
+ def __init__(self, metadata, path, env=None):
+ """
+ Initialise an instance.
+ :param metadata: An instance of :class:`Metadata` which describes the
+ distribution. This will normally have been initialised
+ from a metadata file in the ``path``.
+ :param path: The path of the ``.dist-info`` or ``.egg-info``
+ directory for the distribution.
+ :param env: This is normally the :class:`DistributionPath`
+ instance where this distribution was found.
+ """
+ super(BaseInstalledDistribution, self).__init__(metadata)
+ self.path = path
+ self.dist_path = env
+
+ def get_hash(self, data, hasher=None):
+ """
+ Get the hash of some data, using a particular hash algorithm, if
+ specified.
+
+ :param data: The data to be hashed.
+ :type data: bytes
+ :param hasher: The name of a hash implementation, supported by hashlib,
+ or ``None``. Examples of valid values are ``'sha1'``,
+ ``'sha224'``, ``'sha384'``, ``'sha256'``, ``'md5'`` and
+ ``'sha512'``. If no hasher is specified, the ``hasher``
+ attribute of the :class:`InstalledDistribution` instance
+ is used. If the hasher is determined to be ``None``, MD5
+ is used as the hashing algorithm.
+ :returns: The hash of the data. If a hasher was explicitly specified,
+ the returned hash will be prefixed with the specified hasher
+ followed by '='.
+ :rtype: str
+ """
+ if hasher is None:
+ hasher = self.hasher
+ if hasher is None:
+ hasher = hashlib.md5
+ prefix = ''
+ else:
+ hasher = getattr(hashlib, hasher)
+ prefix = '%s=' % self.hasher
+ digest = hasher(data).digest()
+ digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+ return '%s%s' % (prefix, digest)
+
+
+class InstalledDistribution(BaseInstalledDistribution):
+ """
+ Created with the *path* of the ``.dist-info`` directory provided to the
+ constructor.
It reads the metadata contained in ``pydist.json`` when it is
+ instantiated, or uses a passed-in Metadata instance (useful for when
+ dry-run mode is being used).
+ """
+
+ hasher = 'sha256'
+
+ def __init__(self, path, metadata=None, env=None):
+ self.modules = []
+ self.finder = finder = resources.finder_for_path(path)
+ if finder is None:
+ raise ValueError('finder unavailable for %s' % path)
+ if env and env._cache_enabled and path in env._cache.path:
+ metadata = env._cache.path[path].metadata
+ elif metadata is None:
+ r = finder.find(METADATA_FILENAME)
+ # Temporary - for Wheel 0.23 support
+ if r is None:
+ r = finder.find(WHEEL_METADATA_FILENAME)
+ # Temporary - for legacy support
+ if r is None:
+ r = finder.find(LEGACY_METADATA_FILENAME)
+ if r is None:
+ raise ValueError('no %s found in %s' % (METADATA_FILENAME,
+ path))
+ with contextlib.closing(r.as_stream()) as stream:
+ metadata = Metadata(fileobj=stream, scheme='legacy')
+
+ super(InstalledDistribution, self).__init__(metadata, path, env)
+
+ if env and env._cache_enabled:
+ env._cache.add(self)
+
+ r = finder.find('REQUESTED')
+ self.requested = r is not None
+ p = os.path.join(path, 'top_level.txt')
+ if os.path.exists(p):
+ with open(p, 'rb') as f:
+ data = f.read().decode('utf-8')
+ self.modules = data.splitlines()
+
+ def __repr__(self):
+ return '<InstalledDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def _get_records(self):
+ """
+ Get the list of installed files for the distribution.
+ :return: A list of tuples of path, hash and size. Note that hash and
+ size might be ``None`` for some entries. The path is exactly
+ as stored in the file (which is as in PEP 376).
+ """
+ results = []
+ r = self.get_distinfo_resource('RECORD')
+ with contextlib.closing(r.as_stream()) as stream:
+ with CSVReader(stream=stream) as record_reader:
+ # Base location is parent dir of .dist-info dir
+ #base_location = os.path.dirname(self.path)
+ #base_location = os.path.abspath(base_location)
+ for row in record_reader:
+ missing = [None for i in range(len(row), 3)]
+ path, checksum, size = row + missing
+ #if not os.path.isabs(path):
+ # path = path.replace('/', os.sep)
+ # path = os.path.join(base_location, path)
+ results.append((path, checksum, size))
+ return results
+
+ @cached_property
+ def exports(self):
+ """
+ Return the information exported by this distribution.
+ :return: A dictionary of exports, mapping an export category to a dict
+ of :class:`ExportEntry` instances describing the individual
+ export entries, and keyed by name.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ result = self.read_exports()
+ return result
+
+ def read_exports(self):
+ """
+ Read exports data from a file in .ini format.
+
+ :return: A dictionary of exports, mapping an export category to a list
+ of :class:`ExportEntry` instances describing the individual
+ export entries.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ with contextlib.closing(r.as_stream()) as stream:
+ result = read_exports(stream)
+ return result
+
+ def write_exports(self, exports):
+ """
+ Write a dictionary of exports to a file in .ini format.
+ :param exports: A dictionary of exports, mapping an export category to
+ a list of :class:`ExportEntry` instances describing the
+ individual export entries.
+ """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. + + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. + """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. 
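+
+ For example, a file recorded at 1024 bytes that is now 2048 bytes
+ would be reported as ``(path, 'size', '1024', '2048')``.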
+ """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. + :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. 
+ + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? + distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. + + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. 
+
+ *req_path*: the path to a setuptools-produced requires.txt file.
+ """
+
+ reqs = []
+ try:
+ with codecs.open(req_path, 'r', 'utf-8') as fp:
+ reqs = parse_requires_data(fp.read())
+ except IOError:
+ pass
+ return reqs
+
+ tl_path = tl_data = None
+ if path.endswith('.egg'):
+ if os.path.isdir(path):
+ p = os.path.join(path, 'EGG-INFO')
+ meta_path = os.path.join(p, 'PKG-INFO')
+ metadata = Metadata(path=meta_path, scheme='legacy')
+ req_path = os.path.join(p, 'requires.txt')
+ tl_path = os.path.join(p, 'top_level.txt')
+ requires = parse_requires_path(req_path)
+ else:
+ # FIXME handle the case where zipfile is not available
+ zipf = zipimport.zipimporter(path)
+ fileobj = StringIO(
+ zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+ metadata = Metadata(fileobj=fileobj, scheme='legacy')
+ try:
+ data = zipf.get_data('EGG-INFO/requires.txt')
+ tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
+ requires = parse_requires_data(data.decode('utf-8'))
+ except IOError:
+ requires = None
+ elif path.endswith('.egg-info'):
+ if os.path.isdir(path):
+ req_path = os.path.join(path, 'requires.txt')
+ requires = parse_requires_path(req_path)
+ path = os.path.join(path, 'PKG-INFO')
+ tl_path = os.path.join(path, 'top_level.txt')
+ metadata = Metadata(path=path, scheme='legacy')
+ else:
+ raise DistlibException('path must end with .egg-info or .egg, '
+ 'got %r' % path)
+
+ if requires:
+ metadata.add_requirements(requires)
+ # look for top-level modules in top_level.txt, if present
+ if tl_data is None:
+ if tl_path is not None and os.path.exists(tl_path):
+ with open(tl_path, 'rb') as f:
+ tl_data = f.read().decode('utf-8')
+ if not tl_data:
+ tl_data = []
+ else:
+ tl_data = tl_data.splitlines()
+ self.modules = tl_data
+ return metadata
+
+ def __repr__(self):
+ return '<EggInfoDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def check_installed_files(self):
+ """
+ Checks that the hashes and sizes of the files in ``installed-files.txt``
+ are matched by the files themselves. Returns a (possibly empty) list of
+ mismatches. Each entry in the mismatch list will be a tuple consisting
+ of the path, 'exists', 'size' or 'hash' according to what didn't match
+ (existence is checked first, then size, then hash), the expected
+ value and the actual value.
+ """
+ mismatches = []
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ if os.path.exists(record_path):
+ for path, _, _ in self.list_installed_files():
+ if path == record_path:
+ continue
+ if not os.path.exists(path):
+ mismatches.append((path, 'exists', True, False))
+ return mismatches
+
+ def list_installed_files(self):
+ """
+ Iterates over the ``installed-files.txt`` entries and returns a tuple
+ ``(path, hash, size)`` for each line.
+ + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. + :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. 
+ + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. + + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
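+ # Every remaining node still has at least one outgoing edge, so
+ # no further node can become removable; report them as cyclic.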
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. 
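+
+ For example, ``make_dist('foo', '1.0.0')`` returns a
+ :class:`Distribution` whose metadata carries a placeholder summary.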
+ """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Placeholder for summary' + return Distribution(md) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/index.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/index.py new file mode 100644 index 000000000..b1fbbf8e8 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/index.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. + """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + with open(os.devnull, 'w') as sink: + # Use gpg by default rather than gpg2, as gpg2 insists on + # prompting for passwords + for s in ('gpg', 'gpg2'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from .util import _get_pypirc_command as cmd + return cmd() + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils. This populates + ``username``, ``password``, ``realm`` and ``url`` attributes from the + configuration. + """ + from .util import _load_pypirc + cfg = _load_pypirc(self) + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + """ + self.check_credentials() + from .util import _store_pypirc + _store_pypirc(self) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. 
+        """
+        if self.username is None or self.password is None:
+            raise DistlibException('username and password must be set')
+        pm = HTTPPasswordMgr()
+        _, netloc, _, _, _, _ = urlparse(self.url)
+        pm.add_password(self.realm, netloc, self.username, self.password)
+        self.password_handler = HTTPBasicAuthHandler(pm)
+
+    def register(self, metadata):
+        """
+        Register a distribution on PyPI, using the provided metadata.
+
+        :param metadata: A :class:`Metadata` instance defining at least a name
+                         and version number for the distribution to be
+                         registered.
+        :return: The HTTP response received from PyPI upon submission of the
+                 request.
+        """
+        self.check_credentials()
+        metadata.validate()
+        d = metadata.todict()
+        d[':action'] = 'verify'
+        request = self.encode_request(d.items(), [])
+        response = self.send_request(request)
+        d[':action'] = 'submit'
+        request = self.encode_request(d.items(), [])
+        return self.send_request(request)
+
+    def _reader(self, name, stream, outbuf):
+        """
+        Thread runner for reading lines of output from a subprocess into a
+        buffer.
+
+        :param name: The logical name of the stream (used for logging only).
+        :param stream: The stream to read from. This will typically be a pipe
+                       connected to the output stream of a subprocess.
+        :param outbuf: The list to append the read lines to.
+        """
+        while True:
+            s = stream.readline()
+            if not s:
+                break
+            s = s.decode('utf-8').rstrip()
+            outbuf.append(s)
+            logger.debug('%s: %s' % (name, s))
+        stream.close()
+
+    def get_sign_command(self, filename, signer, sign_password,
+                         keystore=None):
+        """
+        Return a suitable command for signing a file.
+
+        :param filename: The pathname to the file to be signed.
+        :param signer: The identifier of the signer of the file.
+        :param sign_password: The passphrase for the signer's
+                              private key used for signing.
+        :param keystore: The path to a directory which contains the keys
+                         used in signing. If not specified, the
+                         instance's ``gpg_home`` attribute is used instead.
+        :return: The signing command as a list suitable to be
+                 passed to :class:`subprocess.Popen`.
+        """
+        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+        if keystore is None:
+            keystore = self.gpg_home
+        if keystore:
+            cmd.extend(['--homedir', keystore])
+        if sign_password is not None:
+            cmd.extend(['--batch', '--passphrase-fd', '0'])
+        td = tempfile.mkdtemp()
+        sf = os.path.join(td, os.path.basename(filename) + '.asc')
+        cmd.extend(['--detach-sign', '--armor', '--local-user',
+                    signer, '--output', sf, filename])
+        logger.debug('invoking: %s', ' '.join(cmd))
+        return cmd, sf
+
+    def run_command(self, cmd, input_data=None):
+        """
+        Run a command in a child process, passing it any input data specified.
+
+        :param cmd: The command to run.
+        :param input_data: If specified, this must be a byte string containing
+                           data to be sent to the child process.
+        :return: A tuple consisting of the subprocess' exit code, a list of
+                 lines read from the subprocess' ``stdout``, and a list of
+                 lines read from the subprocess' ``stderr``.
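+
+        Illustrative call (a sketch, given a configured ``PackageIndex``
+        instance ``index``; assumes some ``gpg`` binary on the PATH):
+
+        >>> rc, out, err = index.run_command(['gpg', '--version'])  # doctest: +SKIP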
+ """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
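+
+        A sketch of a typical source upload (given a configured
+        ``PackageIndex`` instance ``index``; the metadata object ``md`` and
+        the archive path are hypothetical):
+
+        >>> index.upload_file(md, 'dist/foo-1.0.tar.gz', filetype='sdist',
+        ...                   pyversion='source')  # doctest: +SKIP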
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protocol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
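+
+        Illustrative use (the file names are hypothetical; requires a working
+        ``gpg``):
+
+        >>> index.verify_signature('foo-1.0.tar.gz.asc',
+        ...                        'foo-1.0.tar.gz')  # doctest: +SKIP
+        True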
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. + with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
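+
+        A minimal sketch (the field and file values are hypothetical):
+
+        >>> req = index.encode_request([('name', 'foo'), ('version', '1.0')],
+        ...                            [('content', 'foo.txt', b'data')])  # doctest: +SKIP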
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + rpc_proxy = ServerProxy(self.url, timeout=3.0) + try: + return rpc_proxy.search(terms, operator or 'and') + finally: + rpc_proxy('close')() diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/locators.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/locators.py new file mode 100644 index 000000000..c78bc9e23 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/locators.py @@ -0,0 +1,1300 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, text_type, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata, MetadataInvalidError +from .util import (cached_property, ensure_slash, split_filename, get_project_data, + parse_requirement, parse_name_and_version, ServerProxy, + normalize_name) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + try: + return client.list_packages() + finally: + client('close')() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. 
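+    #
+    # Illustrative effect (values hypothetical): a relative redirect such as
+    # 'Location: /simple/foo/' in a response to a request for
+    # 'https://pypi.org/simple/' is made absolute with urljoin, i.e.
+    # urljoin('https://pypi.org/simple/', '/simple/foo/') ->
+    # 'https://pypi.org/simple/foo/', before the base handler processes it.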
+ def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. + newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: # pragma: no cover + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None + self.errors = queue.Queue() + + def get_errors(self): + """ + Return any errors which have occurred. + """ + result = [] + while not self.errors.empty(): # pragma: no cover + try: + e = self.errors.get(False) + result.append(e) + except self.errors.Empty: + continue + self.errors.task_done() + return result + + def clear_errors(self): + """ + Clear any errors which may have been logged. + """ + # Just get the errors and throw them away + self.get_errors() + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. 
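+
+        Illustrative use with a concrete subclass (requires network access;
+        the project name is arbitrary):
+
+        >>> locator = SimpleScrapingLocator('https://pypi.org/simple/')  # doctest: +SKIP
+        >>> versions = locator.get_project('foo')  # doctest: +SKIP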
+ """ + if self._cache is None: # pragma: no cover + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + self.clear_errors() + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + is_downloadable = basename.endswith(self.downloadable_extensions) + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme == 'https', 'pypi.org' in t.netloc, + is_downloadable, is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). + + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. 
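+
+        Illustrative shape of the result (the URL and project name are
+        hypothetical):
+
+        >>> locator.convert_url_to_download_info(
+        ...     'https://example.com/foo-1.0.tar.gz', 'foo')  # doctest: +SKIP
+        {'name': 'foo', 'version': '1.0', 'filename': 'foo-1.0.tar.gz',
+         'url': 'https://example.com/foo-1.0.tar.gz'}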
+ """ + def same_project(name1, name2): + return normalize_name(name1) == normalize_name(name2) + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): # pragma: no cover + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': # pragma: no cover + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if not is_compatible(wheel, self.wheel_tags): + logger.debug('Wheel not compatible: %s', path) + else: + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: # pragma: no cover + logger.warning('invalid path for wheel: %s', path) + elif not path.endswith(self.downloadable_extensions): # pragma: no cover + logger.debug('Not downloadable: %s', path) + else: # downloadable extension + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: # pragma: no cover + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: # pragma: no cover + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at a "digests" dictionary + or keys of the form 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. + """ + result = None + if 'digests' in info: + digests = info['digests'] + for algo in ('sha256', 'md5'): + if algo in digests: + result = (algo, digests[algo]) + break + if not result: + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. 
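+
+        Illustrative use via the module-level default locator (requires
+        network access; the requirement string is hypothetical):
+
+        >>> from distlib.locators import locate    # doctest: +SKIP
+        >>> dist = locate('foo (>= 1.0)')          # doctest: +SKIP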
+ + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. + """ + result = None + r = parse_requirement(requirement) + if r is None: # pragma: no cover + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + pass # logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + # else: + # logger.debug('skipping pre-release ' + # 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: # pragma: no cover + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. 
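+
+    Illustrative construction (the JSON root URL shown is an assumption
+    about the index layout):
+
+    >>> locator = PyPIJSONLocator('https://pypi.org/pypi/')  # doctest: +SKIP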
+ """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + self.errors.put(text_type(e)) + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)? +href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)) +(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." 
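+            # e.g. (illustrative) 'http://host/a b' -> 'http://host/a%20b';
+            # only the path component is percent-quoted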
+ scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. + """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + self.platform_check = False # See issue #112 + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.daemon = True + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. + """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|' + r'win(32|_amd64)|macosx_?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? 
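+
+        e.g. (illustrative) a URL whose filename contains ``win_amd64`` or
+        ``linux_x86_64`` is treated as platform-specific, while a source
+        archive such as ``foo-1.0.tar.gz`` is not.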
+ """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self.platform_check and self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. + """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + try: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + except MetadataInvalidError: # e.g. invalid versions + pass + except Exception as e: # pragma: no cover + self.errors.put(text_type(e)) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
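+
+        Illustrative use (requires network access; the URL is hypothetical):
+
+        >>> page = locator.get_page('https://pypi.org/simple/foo/')  # doctest: +SKIP
+        >>> links = page.links if page else []  # doctest: +SKIP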
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. 
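+
+        e.g. (illustrative) ``'foo-1.0.tar.gz'`` is included because it ends
+        with one of ``downloadable_extensions``, while ``'notes.txt'`` is not.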
+ """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. + + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). 
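+
+        A typical composition (a sketch mirroring ``default_locator`` defined
+        later in this module):
+
+        >>> locator = AggregatingLocator(
+        ...     JSONLocator(),
+        ...     SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0),
+        ...     scheme='legacy')  # doctest: +SKIP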
+ """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. +default_locator = AggregatingLocator( + JSONLocator(), + SimpleScrapingLocator('https://pypi.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. + """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. 
+ """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. + """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. 
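+
+        Illustrative use (requires network access; the requirement is
+        hypothetical):
+
+        >>> finder = DependencyFinder()
+        >>> dists, problems = finder.find('foo (>= 1.0)')  # doctest: +SKIP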
+ + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if meta_extras and dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py new file mode 100644 index 000000000..ca0fe442d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +""" +Class representing the list of files in a distribution. + +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re +import sys + +from . import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + +# +# Due to the different results returned by fnmatch.translate, we need +# to do slightly different processing for Python 2.7 and 3.2 ... this needed +# to be brought in for Python 3.6 onwards. +# +_PYTHON_VERSION = sys.version_info[:2] + +class Manifest(object): + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. + """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. 
+        action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+        # OK, now we know that the action is valid and we have the
+        # right number of words on the line for that action -- so we
+        # can proceed with minimal error-checking.
+        if action == 'include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=True):
+                    logger.warning('no files found matching %r', pattern)
+
+        elif action == 'exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, anchor=True)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'found matching %r', pattern)
+
+        elif action == 'global-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, anchor=False):
+                    logger.warning('no files found matching %r '
+                                   'anywhere in distribution', pattern)
+
+        elif action == 'global-exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, anchor=False)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'matching %r found anywhere in '
+                #                   'distribution', pattern)
+
+        elif action == 'recursive-include':
+            for pattern in patterns:
+                if not self._include_pattern(pattern, prefix=thedir):
+                    logger.warning('no files found matching %r '
+                                   'under directory %r', pattern, thedir)
+
+        elif action == 'recursive-exclude':
+            for pattern in patterns:
+                found = self._exclude_pattern(pattern, prefix=thedir)
+                #if not found:
+                #    logger.warning('no previously-included files '
+                #                   'matching %r found under directory %r',
+                #                   pattern, thedir)
+
+        elif action == 'graft':
+            if not self._include_pattern(None, prefix=dirpattern):
+                logger.warning('no directories found matching %r',
+                               dirpattern)
+
+        elif action == 'prune':
+            if not self._exclude_pattern(None, prefix=dirpattern):
+                logger.warning('no previously-included directories found '
+                               'matching %r', dirpattern)
+        else:   # pragma: no cover
+            # This should never happen, as it should be caught in
+            # _parse_template_line
+            raise DistlibException(
+                'invalid action %r' % action)
+
+    #
+    # Private API
+    #
+
+    def _parse_directive(self, directive):
+        """
+        Validate a directive.
+        :param directive: The directive to validate.
+        :return: A tuple of action, patterns, thedir, dir_patterns
+        """
+        words = directive.split()
+        if len(words) == 1 and words[0] not in ('include', 'exclude',
+                                                'global-include',
+                                                'global-exclude',
+                                                'recursive-include',
+                                                'recursive-exclude',
+                                                'graft', 'prune'):
+            # no action given, let's use the default 'include'
+            words.insert(0, 'include')
+
+        action = words[0]
+        patterns = thedir = dir_pattern = None
+
+        if action in ('include', 'exclude',
+                      'global-include', 'global-exclude'):
+            if len(words) < 2:
+                raise DistlibException(
+                    '%r expects <pattern1> <pattern2> ...' % action)
+
+            patterns = [convert_path(word) for word in words[1:]]
+
+        elif action in ('recursive-include', 'recursive-exclude'):
+            if len(words) < 3:
+                raise DistlibException(
+                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+            thedir = convert_path(words[1])
+            patterns = [convert_path(word) for word in words[2:]]
+
+        elif action in ('graft', 'prune'):
+            if len(words) != 2:
+                raise DistlibException(
+                    '%r expects a single <dir_pattern>' % action)
+
+            dir_pattern = convert_path(words[1])
+
+        else:
+            raise DistlibException('unknown action %r' % action)
+
+        return action, patterns, thedir, dir_pattern
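+
+    # Shape of the tuple returned by _parse_directive (worked examples;
+    # illustrative, not part of the vendored file):
+    #
+    #   _parse_directive('recursive-include docs *.rst *.txt')
+    #       -> ('recursive-include', ['*.rst', '*.txt'], 'docs', None)
+    #   _parse_directive('README.rst')    # bare pattern defaults to 'include'
+    #       -> ('include', ['README.rst'], None, None)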
+    def _include_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Select strings (presumably filenames) from 'self.files' that
+        match 'pattern', a Unix-style wildcard (glob) pattern.
+
+        Patterns are not quite the same as implemented by the 'fnmatch'
+        module: '*' and '?' match non-special characters, where "special"
+        is platform-dependent: slash on Unix; colon, slash, and backslash on
+        DOS/Windows; and colon on Mac OS.
+
+        If 'anchor' is true (the default), then the pattern match is more
+        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
+        'anchor' is false, both of these will match.
+
+        If 'prefix' is supplied, then only filenames starting with 'prefix'
+        (itself a pattern) and ending with 'pattern', with anything in between
+        them, will match.  'anchor' is ignored in this case.
+
+        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+        'pattern' is assumed to be either a string containing a regex or a
+        regex object -- no translation is done, the regex is just compiled
+        and used as-is.
+
+        Selected strings will be added to self.files.
+
+        Return True if files are found.
+        """
+        # XXX docstring lying about what the special chars are?
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+
+        # delayed loading of allfiles list
+        if self.allfiles is None:
+            self.findall()
+
+        for name in self.allfiles:
+            if pattern_re.search(name):
+                self.files.add(name)
+                found = True
+        return found
+
+    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
+                         is_regex=False):
+        """Remove strings (presumably filenames) from 'files' that match
+        'pattern'.
+
+        Other parameters are the same as for 'include_pattern()', above.
+        The list 'self.files' is modified in place. Return True if files are
+        found.
+
+        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
+        packaging source distributions
+        """
+        found = False
+        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+        for f in list(self.files):
+            if pattern_re.search(f):
+                self.files.remove(f)
+                found = True
+        return found
+
+    def _translate_pattern(self, pattern, anchor=True, prefix=None,
+                           is_regex=False):
+        """Translate a shell-like wildcard pattern to a compiled regular
+        expression.
+
+        Return the compiled regex.  If 'is_regex' true,
+        then 'pattern' is directly compiled to a regex (if it's a string)
+        or just returned as-is (assumes it's a regex object).
+        """
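+        # Illustrative behaviour on POSIX (hypothetical session, not part of
+        # the vendored file):
+        #
+        #   >>> m = Manifest('/src')
+        #   >>> bool(m._translate_pattern('*.py').search('/src/a.py'))
+        #   True
+        #   >>> bool(m._translate_pattern('*.py').search('/src/pkg/a.py'))
+        #   False
+        #
+        # With anchor=True the translated '*' cannot cross os.sep; passing
+        # anchor=False (as 'global-include' does) would match both paths.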
+ """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if _PYTHON_VERSION > (3, 2): + # ditch start and end characters + start, _, end = self._glob_to_re('_').partition('_') + + if pattern: + pattern_re = self._glob_to_re(pattern) + if _PYTHON_VERSION > (3, 2): + assert pattern_re.startswith(start) and pattern_re.endswith(end) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + if _PYTHON_VERSION <= (3, 2): + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + else: + prefix_re = self._glob_to_re(prefix) + assert prefix_re.startswith(start) and prefix_re.endswith(end) + prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: + pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] + pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, + pattern_re, end) + else: # no prefix -- respect anchor flag + if anchor: + if _PYTHON_VERSION <= (3, 2): + pattern_re = '^' + base + pattern_re + else: + pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((? y, + '!=': lambda x, y: x != y, + '<': lambda x, y: x < y, + '<=': lambda x, y: x == y or x < y, + '>': lambda x, y: x > y, + '>=': lambda x, y: x == y or x > y, + 'and': lambda x, y: x and y, + 'or': lambda x, y: x or y, + 'in': lambda x, y: x in y, + 'not in': lambda x, y: x not in y, + } + + def evaluate(self, expr, context): + """ + Evaluate a marker expression returned by the :func:`parse_requirement` + function in the specified context. 
+ """ + if isinstance(expr, string_types): + if expr[0] in '\'"': + result = expr[1:-1] + else: + if expr not in context: + raise SyntaxError('unknown variable: %s' % expr) + result = context[expr] + else: + assert isinstance(expr, dict) + op = expr['op'] + if op not in self.operations: + raise NotImplementedError('op not implemented: %s' % op) + elhs = expr['lhs'] + erhs = expr['rhs'] + if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): + raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + + lhs = self.evaluate(elhs, context) + rhs = self.evaluate(erhs, context) + if ((elhs == 'python_version' or erhs == 'python_version') and + op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')): + lhs = NV(lhs) + rhs = NV(rhs) + elif elhs == 'python_version' and op in ('in', 'not in'): + lhs = NV(lhs) + rhs = _get_versions(rhs) + result = self.operations[op](lhs, rhs) + return result + +_DIGITS = re.compile(r'\d+\.\d+') + +def default_context(): + def format_full_version(info): + version = '%s.%s.%s' % (info.major, info.minor, info.micro) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + if hasattr(sys, 'implementation'): + implementation_version = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + implementation_version = '0' + implementation_name = '' + + ppv = platform.python_version() + m = _DIGITS.match(ppv) + pv = m.group(0) + result = { + 'implementation_name': implementation_name, + 'implementation_version': implementation_version, + 'os_name': os.name, + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), + 'platform_release': platform.release(), + 'platform_system': platform.system(), + 'platform_version': platform.version(), + 'platform_in_venv': str(in_venv()), + 'python_full_version': ppv, + 'python_version': pv, + 'sys_platform': sys.platform, + } + return result + +DEFAULT_CONTEXT = default_context() +del default_context + +evaluator = Evaluator() + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. + :type execution_context: mapping + """ + try: + expr, rest = parse_marker(marker) + except Exception as e: + raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + if rest and rest[0] != '#': + raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + context = dict(DEFAULT_CONTEXT) + if execution_context: + context.update(execution_context) + return evaluator.evaluate(expr, context) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/metadata.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 000000000..6a26b0ab2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1058 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and withdrawn 2.0). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . 
import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \\|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in +# the metadata. Include them in the tuple literal below to allow them +# (for now). +# Ditto for Obsoletes - see issue #140. 
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type', + 'Requires', 'Provides', 'Obsoletes') + +_566_MARKERS = ('Description-Content-Type',) + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) +_ALL_FIELDS.update(_566_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version in ('1.3', '2.1'): + # avoid adding field names if already there + return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS) + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + logger.debug('Removed 1.0 due to %s', key) + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + logger.debug('Removed 1.1 due to %s', key) + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + logger.debug('Removed 1.2 due to %s', key) + if key not in _566_FIELDS and '1.3' in possible_versions: + possible_versions.remove('1.3') + logger.debug('Removed 1.3 due to %s', key) + if key not in _566_FIELDS and '2.1' in possible_versions: + if key != 'Description': # In 2.1, description allowed after headers + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + logger.debug('Removed 2.0 due to %s', key) + + # possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! 
+ elif len(possible_versions) == 0: + logger.debug('Out of options - unknown metadata set: %s', fields) + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + if is_2_1: + return '2.1' + + return '2.0' + +# This follows the rules about transforming keys as described in +# https://www.python.org/dev/peps/pep-0566/#id17 +_ATTR2FIELD = { + name.lower().replace("-", "_"): name for name in _ALL_FIELDS +} +_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. 
+ + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + + # PEP 566 specifies that the body be used for the description, if + # available + body = msg.get_payload() + self["Description"] = body if body else self["Description"] + # logger.debug('Attempting to set metadata for %s', self) + # self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. 
+ """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + "'%s': '%s' is not valid (field '%s')", + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? + elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning("'%s': '%s' is not a valid version (field '%s')", + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append("Wrong value for '%s': %s" % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). + This is as per https://www.python.org/dev/peps/pep-0566/#id17. + """ + self.set_metadata_version() + + fields = _version2fieldlist(self['Metadata-Version']) + + data = {} + + for field_name in fields: + if not skip_missing or field_name in self._fields: + key = _FIELD2ATTR[field_name] + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' +LEGACY_METADATA_FILENAME = 'METADATA' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. 
+ """ + + METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if not d: + d = self._data.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError("'%s' is an invalid value for " + "the '%s' property" % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = 
self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = 
self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + ('extensions', 'python.details', 'license'): 'License', + 'summary': 'Summary', + 'description': 'Description', + ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page', + ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author', + ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email', + 'source_url': 'Download-URL', + ('extensions', 'python.details', 'classifiers'): 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + # import pdb; pdb.set_trace() + for nk, ok in self.LEGACY_MAPPING.items(): + if not isinstance(nk, tuple): + if nk in nmd: + result[ok] = nmd[nk] + else: + d = nmd + found = True + for k in nk: + try: + d = d[k] + except (KeyError, IndexError): + found = False + break + if found: + result[ok] = d + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: any other fields wanted + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 000000000..fef52aa10 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,358 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2017 Vinay Sajip. 
+# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. + """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. 
+ """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def _is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + # In Python 3.6, _frozen_importlib -> _frozen_importlib_external + try: + import _frozen_importlib_external as _fi + except ImportError: + import _frozen_importlib as _fi + _finder_registry[_fi.SourceFileLoader] = ResourceFinder + _finder_registry[_fi.FileFinder] = ResourceFinder + # See issue #146 + _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder + del _fi +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. 
+ """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. + """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 000000000..913912c7b --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, get_platform, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- +import re +import sys +from %(module)s import %(import_name)s +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(%(func)s()) +''' + + +def enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + +# Keep the old name around (for now), as there is at least one project using it! +_enquote_executable = enquote_executable + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. 
+ """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + self.version_info = sys.version_info + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. + import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _build_shebang(self, executable, post_interp): + """ + Build a shebang line. In the simple case (on Windows, or a shebang line + which is not too long or contains spaces) use a simple formulation for + the shebang. Otherwise, use /bin/sh as the executable, with a contrived + shebang which allows the script to run either under Python or sh, using + suitable quoting. Thanks to Harald Nordgren for his input. + + See also: http://www.in-ulm.de/~mascheck/various/shebang/#length + https://hg.mozilla.org/mozilla-central/file/tip/mach + """ + if os.name != 'posix': + simple_shebang = True + else: + # Add 3 for '#!' prefix and newline suffix. + shebang_length = len(executable) + len(post_interp) + 3 + if sys.platform == 'darwin': + max_shebang_length = 512 + else: + max_shebang_length = 127 + simple_shebang = ((b' ' not in executable) and + (shebang_length <= max_shebang_length)) + + if simple_shebang: + result = b'#!' 
+ executable + post_interp + b'\n' + else: + result = b'#!/bin/sh\n' + result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' + result += b"' '''" + return result + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if not os.path.isfile(executable): + # for Python builds from source on Windows, no Python executables with + # a version suffix are created, so we use python.exe + executable = os.path.join(sysconfig.get_config_var('BINDIR'), + 'python%s' % (sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + + # Normalise case for Windows - COMMENTED OUT + # executable = os.path.normcase(executable) + # N.B. The normalising operation above has been commented out: See + # issue #124. Although paths in Windows are generally case-insensitive, + # they aren't always. For example, a path containing a ẞ (which is a + # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a + # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by + # Windows as equivalent in path names. + + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = self._build_shebang(executable, post_interp) + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. + try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. 
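+        # For orientation (sketch; the interpreter paths are hypothetical):
+        # a short POSIX path yields b'#!/usr/bin/python3\n', while a path
+        # that is over-long or contains spaces falls back to the /bin/sh
+        # trampoline built by _build_shebang() above:
+        #
+        #   #!/bin/sh
+        #   '''exec' /very/long/path/to/python "$0" "$@"
+        #   ' '''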
+ if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + import_name=entry.suffix.split('.')[0], + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not shebang.endswith(linesep): + shebang += linesep + if not use_launcher: + script_bytes = shebang + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. + logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + variant_separator = '-' + + def get_script_filenames(self, name): + result = set() + if '' in self.variants: + result.add(name) + if 'X' in self.variants: + result.add('%s%s' % (name, self.version_info[0])) + if 'X.Y' in self.variants: + result.add('%s%s%s.%s' % (name, self.variant_separator, + self.version_info[0], self.version_info[1])) + return result + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + scriptnames = self.get_script_filenames(entry.name) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s is an empty file (skipping)', script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. 
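+ # (Illustrative note based on _write_script above: a generated
+ # Windows executable is the launcher binary, then the shebang line,
+ # then a zip archive whose __main__.py holds the script body.)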
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
+
+ def _get_launcher(self, kind):
+ if struct.calcsize('P') == 8: # 64-bit
+ bits = '64'
+ else:
+ bits = '32'
+ platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
+ name = '%s%s%s.exe' % (kind, bits, platform_suffix)
+ # Issue 31: don't hardcode an absolute package name, but
+ # determine it relative to the current package
+ distlib_package = __name__.rsplit('.', 1)[0]
+ resource = finder(distlib_package).find(name)
+ if not resource:
+ msg = ('Unable to find resource %s in package %s' % (name,
+ distlib_package))
+ raise ValueError(msg)
+ return resource.bytes
+
+ # Public API follows
+
+ def make(self, specification, options=None):
+ """
+ Make a script.
+
+ :param specification: The specification, which is either a valid export
+ entry specification (to make a script from a
+ callable) or a filename (to make a script by
+ copying from a source location).
+ :param options: A dictionary of options controlling script generation.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ entry = get_export_entry(specification)
+ if entry is None:
+ self._copy_script(specification, filenames)
+ else:
+ self._make_script(entry, filenames, options=options)
+ return filenames
+
+ def make_multiple(self, specifications, options=None):
+ """
+ Take a list of specifications and make scripts from them.
+ :param specifications: A list of specifications.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ for specification in specifications:
+ filenames.extend(self.make(specification, options))
+ return filenames
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/util.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/util.py
new file mode 100644
index 000000000..dd01849d9
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/util.py
@@ -0,0 +1,1932 @@
+#
+# Copyright (C) 2012-2021 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import socket
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+ import threading
+except ImportError: # pragma: no cover
+ import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+ cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+ splittype, HTTPHandler, BaseConfigurator, valid_ident,
+ Container, configparser, URLError, ZipFile, fsdecode,
+ unquote, urlparse)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code as per PEP 508
+#
+
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
+
+
+def parse_marker(marker_string):
+ """
+ Parse a marker string and return a dictionary containing a marker expression.
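+
+ For example (an illustrative case, not from the original docstring),
+ the marker 'python_version >= "3.6"' corresponds to the expression
+ {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'}.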
+ + The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in + the expression grammar, or strings. A string contained in quotes is to be + interpreted as a literal string, and a string not contained in quotes is a + variable (such as os_name). + """ + def marker_var(remaining): + # either identifier, or literal string + m = IDENTIFIER.match(remaining) + if m: + result = m.groups()[0] + remaining = remaining[m.end():] + elif not remaining: + raise SyntaxError('unexpected end of input') + else: + q = remaining[0] + if q not in '\'"': + raise SyntaxError('invalid expression: %s' % remaining) + oq = '\'"'.replace(q, '') + remaining = remaining[1:] + parts = [q] + while remaining: + # either a string chunk, or oq, or q to terminate + if remaining[0] == q: + break + elif remaining[0] == oq: + parts.append(oq) + remaining = remaining[1:] + else: + m = STRING_CHUNK.match(remaining) + if not m: + raise SyntaxError('error in string literal: %s' % remaining) + parts.append(m.groups()[0]) + remaining = remaining[m.end():] + else: + s = ''.join(parts) + raise SyntaxError('unterminated string: %s' % s) + parts.append(q) + result = ''.join(parts) + remaining = remaining[1:].lstrip() # skip past closing quote + return result, remaining + + def marker_expr(remaining): + if remaining and remaining[0] == '(': + result, remaining = marker(remaining[1:].lstrip()) + if remaining[0] != ')': + raise SyntaxError('unterminated parenthesis: %s' % remaining) + remaining = remaining[1:].lstrip() + else: + lhs, remaining = marker_var(remaining) + while remaining: + m = MARKER_OP.match(remaining) + if not m: + break + op = m.groups()[0] + remaining = remaining[m.end():] + rhs, remaining = marker_var(remaining) + lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + result = lhs + return result, remaining + + def marker_and(remaining): + lhs, remaining = marker_expr(remaining) + while remaining: + m = AND.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_expr(remaining) + lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + def marker(remaining): + lhs, remaining = marker_and(remaining) + while remaining: + m = OR.match(remaining) + if not m: + break + remaining = remaining[m.end():] + rhs, remaining = marker_and(remaining) + lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + return lhs, remaining + + return marker(marker_string) + + +def parse_requirement(req): + """ + Parse a requirement passed in as a string. Return a Container + whose attributes contain the various parts of the requirement. 
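+
+ For example (an illustrative case, not from the original docstring),
+ 'foo[bar] >= 1.0; python_version < "3.8"' would be expected to yield a
+ Container with name 'foo', extras ['bar'], constraints [('>=', '1.0')]
+ and a marker expression for the python_version clause.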
+ """ + remaining = req.strip() + if not remaining or remaining.startswith('#'): + return None + m = IDENTIFIER.match(remaining) + if not m: + raise SyntaxError('name expected: %s' % remaining) + distname = m.groups()[0] + remaining = remaining[m.end():] + extras = mark_expr = versions = uri = None + if remaining and remaining[0] == '[': + i = remaining.find(']', 1) + if i < 0: + raise SyntaxError('unterminated extra: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + extras = [] + while s: + m = IDENTIFIER.match(s) + if not m: + raise SyntaxError('malformed extra: %s' % s) + extras.append(m.groups()[0]) + s = s[m.end():] + if not s: + break + if s[0] != ',': + raise SyntaxError('comma expected in extras: %s' % s) + s = s[1:].lstrip() + if not extras: + extras = None + if remaining: + if remaining[0] == '@': + # it's a URI + remaining = remaining[1:].lstrip() + m = NON_SPACE.match(remaining) + if not m: + raise SyntaxError('invalid URI: %s' % remaining) + uri = m.groups()[0] + t = urlparse(uri) + # there are issues with Python and URL parsing, so this test + # is a bit crude. See bpo-20271, bpo-23505. Python doesn't + # always parse invalid URLs correctly - it should raise + # exceptions for malformed URLs + if not (t.scheme and t.netloc): + raise SyntaxError('Invalid URL: %s' % uri) + remaining = remaining[m.end():].lstrip() + else: + + def get_versions(ver_remaining): + """ + Return a list of operator, version tuples if any are + specified, else None. + """ + m = COMPARE_OP.match(ver_remaining) + versions = None + if m: + versions = [] + while True: + op = m.groups()[0] + ver_remaining = ver_remaining[m.end():] + m = VERSION_IDENTIFIER.match(ver_remaining) + if not m: + raise SyntaxError('invalid version: %s' % ver_remaining) + v = m.groups()[0] + versions.append((op, v)) + ver_remaining = ver_remaining[m.end():] + if not ver_remaining or ver_remaining[0] != ',': + break + ver_remaining = ver_remaining[1:].lstrip() + # Some packages have a trailing comma which would break things + # See issue #148 + if not ver_remaining: + break + m = COMPARE_OP.match(ver_remaining) + if not m: + raise SyntaxError('invalid constraint: %s' % ver_remaining) + if not versions: + versions = None + return versions, ver_remaining + + if remaining[0] != '(': + versions, remaining = get_versions(remaining) + else: + i = remaining.find(')', 1) + if i < 0: + raise SyntaxError('unterminated parenthesis: %s' % remaining) + s = remaining[1:i] + remaining = remaining[i + 1:].lstrip() + # As a special diversion from PEP 508, allow a version number + # a.b.c in parentheses as a synonym for ~= a.b.c (because this + # is allowed in earlier PEPs) + if COMPARE_OP.match(s): + versions, _ = get_versions(s) + else: + m = VERSION_IDENTIFIER.match(s) + if not m: + raise SyntaxError('invalid constraint: %s' % s) + v = m.groups()[0] + s = s[m.end():].lstrip() + if s: + raise SyntaxError('invalid constraint: %s' % s) + versions = [('~=', v)] + + if remaining: + if remaining[0] != ';': + raise SyntaxError('invalid requirement: %s' % remaining) + remaining = remaining[1:].lstrip() + + mark_expr, remaining = parse_marker(remaining) + + if remaining and remaining[0] != '#': + raise SyntaxError('unexpected trailing data: %s' % remaining) + + if not versions: + rs = distname + else: + rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + return Container(name=distname, extras=extras, constraints=versions, + marker=mark_expr, url=uri, requirement=rs) + + +def 
get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(root, path): + # normalizes and returns a lstripped-/-separated path + root = root.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(root) + return path[len(root):].lstrip('/') + + destinations = {} + for base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + # Avoid normcasing: see issue #143 + # result = os.path.normcase(sys.executable) + result = sys.executable + if not isinstance(result, text_type): + result = fsdecode(result) + return result + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) 
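+ # (Descriptive note: each entry below is written as
+ # 'name = prefix:suffix [flags]', the same shape that
+ # get_export_entry() parses back.)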
+ for entry in v.values():
+ if entry.suffix is None:
+ s = entry.prefix
+ else:
+ s = '%s:%s' % (entry.prefix, entry.suffix)
+ if entry.flags:
+ s = '%s [%s]' % (s, ', '.join(entry.flags))
+ cp.set(k, entry.name, s)
+ cp.write(stream)
+
+
+@contextlib.contextmanager
+def tempdir():
+ td = tempfile.mkdtemp()
+ try:
+ yield td
+ finally:
+ shutil.rmtree(td)
+
+@contextlib.contextmanager
+def chdir(d):
+ cwd = os.getcwd()
+ try:
+ os.chdir(d)
+ yield
+ finally:
+ os.chdir(cwd)
+
+
+@contextlib.contextmanager
+def socket_timeout(seconds=15):
+ cto = socket.getdefaulttimeout()
+ try:
+ socket.setdefaulttimeout(seconds)
+ yield
+ finally:
+ socket.setdefaulttimeout(cto)
+
+
+class cached_property(object):
+ def __init__(self, func):
+ self.func = func
+ #for attr in ('__name__', '__module__', '__doc__'):
+ # setattr(self, attr, getattr(func, attr, None))
+
+ def __get__(self, obj, cls=None):
+ if obj is None:
+ return self
+ value = self.func(obj)
+ object.__setattr__(obj, self.func.__name__, value)
+ #obj.__dict__[self.func.__name__] = value = self.func(obj)
+ return value
+
+def convert_path(pathname):
+ """Return 'pathname' as a name that will work on the native filesystem.
+
+ The path is split on '/' and put back together again using the current
+ directory separator. Needed because filenames in the setup script are
+ always supplied in Unix style, and have to be converted to the local
+ convention before we can actually use them in the filesystem. Raises
+ ValueError on non-Unix-ish systems if 'pathname' either starts or
+ ends with a slash.
+ """
+ if os.sep == '/':
+ return pathname
+ if not pathname:
+ return pathname
+ if pathname[0] == '/':
+ raise ValueError("path '%s' cannot be absolute" % pathname)
+ if pathname[-1] == '/':
+ raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+ paths = pathname.split('/')
+ while os.curdir in paths:
+ paths.remove(os.curdir)
+ if not paths:
+ return os.curdir
+ return os.path.join(*paths)
+
+
+class FileOperator(object):
+ def __init__(self, dry_run=False):
+ self.dry_run = dry_run
+ self.ensured = set()
+ self._init_record()
+
+ def _init_record(self):
+ self.record = False
+ self.files_written = set()
+ self.dirs_created = set()
+
+ def record_as_written(self, path):
+ if self.record:
+ self.files_written.add(path)
+
+ def newer(self, source, target):
+ """Tell if the target is newer than the source.
+
+ Returns true if 'source' exists and is more recently modified than
+ 'target', or if 'source' exists and 'target' doesn't.
+
+ Returns false if both exist and 'target' is the same age or younger
+ than 'source'. Raise DistlibException if 'source' does not exist.
+
+ Note that this test is not very accurate: files created in the same
+ second will have the same "age".
+ """
+ if not os.path.exists(source):
+ raise DistlibException("file '%r' does not exist" %
+ os.path.abspath(source))
+ if not os.path.exists(target):
+ return True
+
+ return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+ def copy_file(self, infile, outfile, check=True):
+ """Copy a file respecting dry-run and force flags.
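+
+ Example (illustrative, assuming a FileOperator instance)::
+
+ fileop = FileOperator(dry_run=True)
+ fileop.copy_file('setup.cfg', 'backup/setup.cfg') # logged, not copied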
+ """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + if os.path.exists(path): + os.remove(path) + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.write_binary_file(path, data.encode(encoding)) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. + for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + compile_kwargs = {} + if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'): + compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH + py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded 
changes, turn off recording, return
+ changes.
+ """
+ assert self.record
+ result = self.files_written, self.dirs_created
+ self._init_record()
+ return result
+
+ def rollback(self):
+ if not self.dry_run:
+ for f in list(self.files_written):
+ if os.path.exists(f):
+ os.remove(f)
+ # dirs should all be empty now, except perhaps for
+ # __pycache__ subdirs
+ # reverse so that subdirs appear before their parents
+ dirs = sorted(self.dirs_created, reverse=True)
+ for d in dirs:
+ flist = os.listdir(d)
+ if flist:
+ assert flist == ['__pycache__']
+ sd = os.path.join(d, flist[0])
+ os.rmdir(sd)
+ os.rmdir(d) # should fail if non-empty
+ self._init_record()
+
+def resolve(module_name, dotted_path):
+ if module_name in sys.modules:
+ mod = sys.modules[module_name]
+ else:
+ mod = __import__(module_name)
+ if dotted_path is None:
+ result = mod
+ else:
+ parts = dotted_path.split('.')
+ result = getattr(mod, parts.pop(0))
+ for p in parts:
+ result = getattr(result, p)
+ return result
+
+
+class ExportEntry(object):
+ def __init__(self, name, prefix, suffix, flags):
+ self.name = name
+ self.prefix = prefix
+ self.suffix = suffix
+ self.flags = flags
+
+ @cached_property
+ def value(self):
+ return resolve(self.prefix, self.suffix)
+
+ def __repr__(self): # pragma: no cover
+ return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+ self.suffix, self.flags)
+
+ def __eq__(self, other):
+ if not isinstance(other, ExportEntry):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.prefix == other.prefix and
+ self.suffix == other.suffix and
+ self.flags == other.flags)
+ return result
+
+ __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+ \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+ \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+ ''', re.VERBOSE)
+
+def get_export_entry(specification):
+ m = ENTRY_RE.search(specification)
+ if not m:
+ result = None
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ else:
+ d = m.groupdict()
+ name = d['name']
+ path = d['callable']
+ colons = path.count(':')
+ if colons == 0:
+ prefix, suffix = path, None
+ else:
+ if colons != 1:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ prefix, suffix = path.split(':')
+ flags = d['flags']
+ if flags is None:
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ flags = []
+ else:
+ flags = [f.strip() for f in flags.split(',')]
+ result = ExportEntry(name, prefix, suffix, flags)
+ return result
+
+
+def get_cache_base(suffix=None):
+ """
+ Return the default base location for distlib caches. If the directory does
+ not exist, it is created. Use the suffix provided for the base directory,
+ and default to '.distlib' if it isn't provided.
+
+ On Windows, if LOCALAPPDATA is defined in the environment, then it is
+ assumed to be a directory, and will be the parent directory of the result.
+ On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+ directory - using os.path.expanduser('~') - will be the parent directory of
+ the result.
+
+ The result is just the directory '.distlib' in the parent directory as
+ determined above, or with the name specified with ``suffix``.
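+
+ For example (illustrative): on POSIX with no suffix, this typically
+ returns the equivalent of os.path.expanduser('~/.distlib'), falling
+ back to a temporary directory if that location is unusable.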
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. + """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.rsplit('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + if username: + username = unquote(username) + if password: + password = unquote(password) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + filename = unquote(filename).replace(' ', '-') + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + ct = headers.get('Content-Type') + if not ct.startswith('application/json'): + logger.debug('Unexpected response for JSON request: %s', ct) + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): # pragma: no cover + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. 
+ """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. + """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of anything' % succ) + try: + preds.remove(pred) + succs.remove(succ) + except KeyError: # pragma: no cover + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... 
but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component = tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: # pragma: no cover + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. 
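+ # (Descriptive note: the loop below forces each member name to
+ # text so that extractall does not attempt an implicit ASCII
+ # decode.)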
+ for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? %' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' 
+ #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' 
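+ # (Illustrative: 'src/**/*.py' splits into prefix 'src/' and
+ # radical '/*.py'; the radical is normalised below and re-globbed
+ # under each directory found by os.walk.)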
+ if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + +if ssl: + from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, + CertificateError) + + +# +# HTTPSConnection which verifies certificates/matches domains +# + + class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + if hasattr(ssl, 'OP_NO_SSLv2'): + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: # pragma: no cover + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + + # + # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- + # Middle proxy using HTTP listens on port 443, or an index mistakenly serves + # HTML containing a http://xyz link when it should be https://xyz), + # you can use the following handler class, which does not allow HTTP traffic. + # + # It works by inheriting from HTTPHandler - so build_opener won't add a + # handler for HTTP itself. 
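+ #
+ # For example (illustrative; build_opener is urllib's, not imported
+ # here):
+ #
+ # opener = build_opener(HTTPSOnlyHandler(ca_certs='/path/certs.pem'))
+ # opener.open('https://example.com/') # plain http:// raises URLError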
+ # + class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] + +if ssl: + class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + return self._connection[1] + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a timeout + # is specified + if timeout is not None: + # scheme = splittype(uri) # deprecated as of Python 3.8 + scheme = urlparse(uri)[0] + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + # Python 3 determines encoding from locale. 
Force 'utf-8' + # file encoding to match other forced utf-8 encoding + kwargs['encoding'] = 'utf-8' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + + +class SubprocessMixin(object): + """ + Mixin for running subprocesses and capturing their output + """ + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
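+
+ Example of the enclosing mixin (illustrative)::
+
+ SubprocessMixin(verbose=True).run_command([sys.executable, '-V'])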
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p + + +def normalize_name(name): + """Normalize a python package name a la PEP 503""" + # https://www.python.org/dev/peps/pep-0503/#normalized-names + return re.sub('[-_.]+', '-', name).lower() + +# def _get_pypirc_command(): + # """ + # Get the distutils command for interacting with PyPI configurations. + # :return: the command. + # """ + # from distutils.core import Distribution + # from distutils.config import PyPIRCCommand + # d = Distribution() + # return PyPIRCCommand(d) + +class PyPIRCFile(object): + + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' + DEFAULT_REALM = 'pypi' + + def __init__(self, fn=None, url=None): + if fn is None: + fn = os.path.join(os.path.expanduser('~'), '.pypirc') + self.filename = fn + self.url = url + + def read(self): + result = {} + + if os.path.exists(self.filename): + repository = self.url or self.DEFAULT_REPOSITORY + + config = configparser.RawConfigParser() + config.read(self.filename) + sections = config.sections() + if 'distutils' in sections: + # let's get the list of servers + index_servers = config.get('distutils', 'index-servers') + _servers = [server.strip() for server in + index_servers.split('\n') + if server.strip() != ''] + if _servers == []: + # nothing set, let's try to get the default pypi + if 'pypi' in sections: + _servers = ['pypi'] + else: + for server in _servers: + result = {'server': server} + result['username'] = config.get(server, 'username') + + # optional params + for key, default in (('repository', self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None)): + if config.has_option(server, key): + result[key] = config.get(server, key) + else: + result[key] = default + + # work around people having "repository" for the "pypi" + # section of their config set to the HTTP (rather than + # HTTPS) URL + if (server == 'pypi' and + repository in (self.DEFAULT_REPOSITORY, 'pypi')): + result['repository'] = self.DEFAULT_REPOSITORY + elif (result['server'] != repository and + result['repository'] != repository): + result = {} + elif 'server-login' in sections: + # old format + server = 'server-login' + if config.has_option(server, 'repository'): + repository = config.get(server, 'repository') + else: + repository = self.DEFAULT_REPOSITORY + result = { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM + } + return result + + def update(self, username, password): + # import pdb; pdb.set_trace() + config = configparser.RawConfigParser() + fn = self.filename + config.read(fn) + if not config.has_section('pypi'): + config.add_section('pypi') + config.set('pypi', 'username', username) + config.set('pypi', 'password', password) + with open(fn, 'w') as f: + 
config.write(f)
+
+def _load_pypirc(index):
+ """
+ Read the PyPI access configuration as supported by distutils.
+ """
+ return PyPIRCFile(url=index.url).read()
+
+def _store_pypirc(index):
+ PyPIRCFile().update(index.username, index.password)
+
+#
+# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
+# tweaks
+#
+
+def get_host_platform():
+ """Return a string that identifies the current platform. This is used mainly to
+ distinguish platform-specific build directories and platform-specific built
+ distributions. Typically includes the OS name and version and the
+ architecture (as supplied by 'os.uname()'), although the exact information
+ included depends on the OS; e.g. on Linux, the kernel version isn't
+ particularly important.
+
+ Examples of returned values:
+ linux-i586
+ linux-alpha (?)
+ solaris-2.6-sun4u
+
+ Windows will return one of:
+ win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+ win32 (all others - specifically, sys.platform is returned)
+
+ For other non-POSIX platforms, currently just returns 'sys.platform'.
+
+ """
+ if os.name == 'nt':
+ if 'amd64' in sys.version.lower():
+ return 'win-amd64'
+ if '(arm)' in sys.version.lower():
+ return 'win-arm32'
+ if '(arm64)' in sys.version.lower():
+ return 'win-arm64'
+ return sys.platform
+
+ # Set for cross builds explicitly
+ if "_PYTHON_HOST_PLATFORM" in os.environ:
+ return os.environ["_PYTHON_HOST_PLATFORM"]
+
+ if os.name != 'posix' or not hasattr(os, 'uname'):
+ # XXX what about the architecture? NT is Intel or Alpha,
+ # Mac OS is M68k or PPC, etc.
+ return sys.platform
+
+ # Try to distinguish various flavours of Unix
+
+ (osname, host, release, version, machine) = os.uname()
+
+ # Convert the OS name to lowercase, remove '/' characters, and translate
+ # spaces (for "Power Macintosh")
+ osname = osname.lower().replace('/', '')
+ machine = machine.replace(' ', '_').replace('/', '-')
+
+ if osname[:5] == 'linux':
+ # At least on Linux/Intel, 'machine' is the processor --
+ # i386, etc.
+ # XXX what about Alpha, SPARC, etc?
+ return "%s-%s" % (osname, machine)
+
+ elif osname[:5] == 'sunos':
+ if release[0] >= '5': # SunOS 5 == Solaris 2
+ osname = 'solaris'
+ release = '%d.%s' % (int(release[0]) - 3, release[2:])
+ # We can't use 'platform.architecture()[0]' because of a
+ # bootstrap problem. We use a dict to get an error
+ # if something suspicious happens.
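+ # (Descriptive note: the keys are the two possible sys.maxsize
+ # values, so an unexpected platform raises KeyError here.)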
+ bitness = {2147483647:'32bit', 9223372036854775807:'64bit'} + machine += '.%s' % bitness[sys.maxsize] + # fall through to standard osname-release-machine representation + elif osname[:3] == 'aix': + from _aix_support import aix_platform + return aix_platform() + elif osname[:6] == 'cygwin': + osname = 'cygwin' + rel_re = re.compile (r'[\d.]+', re.ASCII) + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == 'darwin': + import _osx_support, distutils.sysconfig + osname, release, machine = _osx_support.get_platform_osx( + distutils.sysconfig.get_config_vars(), + osname, release, machine) + + return '%s-%s-%s' % (osname, release, machine) + + +_TARGET_TO_PLAT = { + 'x86' : 'win32', + 'x64' : 'win-amd64', + 'arm' : 'win-arm32', +} + + +def get_platform(): + if os.name != 'nt': + return get_host_platform() + cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH') + if cross_compilation_target not in _TARGET_TO_PLAT: + return get_host_platform() + return _TARGET_TO_PLAT[cross_compilation_target] diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/version.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/version.py new file mode 100644 index 000000000..c7c8bb6ff --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/version.py @@ -0,0 +1,739 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2017 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. +""" + +import logging +import re + +from .compat import string_types +from .util import parse_requirement + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, 
c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + # this is a method only to support alternative implementations + # via overriding + def parse_requirement(self, s): + return parse_requirement(s) + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + r = self.parse_requirement(s) + if not r: + raise ValueError('Not valid: %r' % s) + self.name = r.name + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if r.constraints: + # import pdb; pdb.set_trace() + for op, s in r.constraints: + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: String or :class:`Version` instance. + """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0][:-1]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. + + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # minimum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
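+ # Illustrative example: matching version '1.0+local.1' against the
+ # constraint '== 1.0' strips the local part first, so the comparison
+ # is made against plain '1.0'.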
+ strip_local = not constraint._parts[-1] and version._parts[-1]
+ if strip_local:
+ s = version._string.split('+', 1)[0]
+ version = self.version_class(s)
+ return version, constraint
+
+ def _match_lt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version >= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_gt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version <= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_le(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version <= constraint
+
+ def _match_ge(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version >= constraint
+
+ def _match_eq(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version == constraint)
+ else:
+ result = _match_prefix(version, constraint)
+ return result
+
+ def _match_arbitrary(self, version, constraint, prefix):
+ return str(version) == str(constraint)
+
+ def _match_ne(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version != constraint)
+ else:
+ result = not _match_prefix(version, constraint)
+ return result
+
+ def _match_compatible(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version == constraint:
+ return True
+ if version < constraint:
+ return False
+# if not prefix:
+# return True
+ release_clause = constraint._release_clause
+ if len(release_clause) > 1:
+ release_clause = release_clause[:-1]
+ pfx = '.'.join([str(i) for i in release_clause])
+ return _match_prefix(version, pfx)
+
+_REPLACEMENTS = (
+ (re.compile('[.+-]$'), ''), # remove trailing puncts
+ (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start
+ (re.compile('^[.-]'), ''), # remove leading puncts
+ (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses
+ (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
+ (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading r(ev)
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha
+ (re.compile(r'\b(pre-alpha|prealpha)\b'),
+ 'pre.alpha'), # standardise
+ (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+ (re.compile('^[:~._+-]+'), ''), # remove leading puncts
+ (re.compile('[,*")([\\]]'), ''), # remove unwanted chars
+ (re.compile('[~:+_ -]'), '.'), # replace illegal chars
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\.$'), ''), # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+ """
+ Try to suggest a semantic form for a version for which
+ _suggest_normalized_version couldn't come up with anything.
+ """
+ result = s.strip().lower()
+ for pat, repl in _REPLACEMENTS:
+ result = pat.sub(repl, result)
+ if not result:
+ result = '0.0.0'
+
+ # Now look for numeric prefix, and separate it out from
+ # the rest.
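+ # Illustrative example: 'v1.2 beta 3' ends up as prefix '1.2.0' and
+ # suffix 'beta.3', giving the semantic suggestion '1.2.0+beta.3'.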
+ #import pdb; pdb.set_trace()
+ m = _NUMERIC_PREFIX.match(result)
+ if not m:
+ prefix = '0.0.0'
+ suffix = result
+ else:
+ prefix = m.groups()[0].split('.')
+ prefix = [int(i) for i in prefix]
+ while len(prefix) < 3:
+ prefix.append(0)
+ if len(prefix) == 3:
+ suffix = result[m.end():]
+ else:
+ suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+ prefix = prefix[:3]
+ prefix = '.'.join([str(i) for i in prefix])
+ suffix = suffix.strip()
+ if suffix:
+ #import pdb; pdb.set_trace()
+ # massage the suffix.
+ for pat, repl in _SUFFIX_REPLACEMENTS:
+ suffix = pat.sub(repl, suffix)
+
+ if not suffix:
+ result = prefix
+ else:
+ sep = '-' if 'dev' in suffix else '+'
+ result = prefix + sep + suffix
+ if not is_semver(result):
+ result = None
+ return result
+
+
+def _suggest_normalized_version(s):
+ """Suggest a normalized version close to the given version string.
+
+ If you have a version string that isn't rational (i.e. NormalizedVersion
+ doesn't like it) then you might be able to get an equivalent (or close)
+ rational version from this function.
+
+ This does a number of simple normalizations to the given string, based
+ on observation of versions currently in use on PyPI. Given a dump of
+ those versions during PyCon 2009, 4287 of them:
+ - 2312 (53.93%) match NormalizedVersion without change
+ - 3474 (81.04%) match when using this suggestion method
+
+ @param s {str} An irrational version string.
+ @returns A rational version string, or None if one couldn't be determined.
+ """
+ try:
+ _normalized_key(s)
+ return s # already rational
+ except UnsupportedVersionError:
+ pass
+
+ rs = s.lower()
+
+ # part of this could use maketrans
+ for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+ ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+ ('-pre', 'c'),
+ ('-release', ''), ('.release', ''), ('-stable', ''),
+ ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+ ('final', '')):
+ rs = rs.replace(orig, repl)
+
+ # if something ends with dev or pre, we add a 0
+ rs = re.sub(r"pre$", r"pre0", rs)
+ rs = re.sub(r"dev$", r"dev0", rs)
+
+ # if we have something like "b-2" or "a.2" at the end of the
+ # version, that is probably beta, alpha, etc
+ # let's remove the dash or dot
+ rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+ # 1.0-dev-r371 -> 1.0.dev371
+ # 0.1-dev-r79 -> 0.1.dev79
+ rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+ # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+ rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+ # Clean: v0.3, v1.0
+ if rs.startswith('v'):
+ rs = rs[1:]
+
+ # Clean leading '0's on numbers.
+ #TODO: unintended side-effect on, e.g., "2003.05.09"
+ # PyPI stats: 77 (~2%) better
+ rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+ # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+ # zero.
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') + return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile(r'^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+ return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+ def make_tuple(s, absent):
+ if s is None:
+ result = (absent,)
+ else:
+ parts = s[1:].split('.')
+ # We can't compare ints and strings on Python 3, so fudge it
+ # by zero-filling numeric values to simulate a numeric comparison
+ result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+ return result
+
+ m = is_semver(s)
+ if not m:
+ raise UnsupportedVersionError(s)
+ groups = m.groups()
+ major, minor, patch = [int(i) for i in groups[:3]]
+ # choose the '|' and '*' so that versions sort correctly
+ pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+ return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+ def parse(self, s):
+ return _semantic_key(s)
+
+ @property
+ def is_prerelease(self):
+ return self._parts[1][0] != '|'
+
+
+class SemanticMatcher(Matcher):
+ version_class = SemanticVersion
+
+
+class VersionScheme(object):
+ def __init__(self, key, matcher, suggester=None):
+ self.key = key
+ self.matcher = matcher
+ self.suggester = suggester
+
+ def is_valid_version(self, s):
+ try:
+ self.matcher.version_class(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_matcher(self, s):
+ try:
+ self.matcher(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_constraint_list(self, s):
+ """
+ Used for processing some metadata fields
+ """
+ # See issue #140. Be tolerant of a single trailing comma.
+ if s.endswith(','):
+ s = s[:-1]
+ return self.is_valid_matcher('dummy_name (%s)' % s)
+
+ def suggest(self, s):
+ if self.suggester is None:
+ result = None
+ else:
+ result = self.suggester(s)
+ return result
+
+_SCHEMES = {
+ 'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+ _suggest_normalized_version),
+ 'legacy': VersionScheme(_legacy_key, LegacyMatcher,
+ lambda s: s), # identity suggester: legacy accepts any string
+ 'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+ _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+ if name not in _SCHEMES:
+ raise ValueError('unknown scheme name: %r' % name)
+ return _SCHEMES[name]
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/wheel.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/wheel.py
new file mode 100644
index 000000000..48abfde5b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distlib/wheel.py
@@ -0,0 +1,1053 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2020 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import datetime
+from email import message_from_file
+import hashlib
+import imp
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from .
import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME)
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+ cached_property, get_cache_base, read_exports, tempdir,
+ get_platform)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None # created when needed
+
+if hasattr(sys, 'pypy_version_info'): # pragma: no cover
+ IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'): # pragma: no cover
+ IMP_PREFIX = 'jy'
+elif sys.platform == 'cli': # pragma: no cover
+ IMP_PREFIX = 'ip'
+else:
+ IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX: # pragma: no cover
+ VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+ ABI = ABI.replace('cpython-', 'cp').split('-')[0]
+else:
+ def _derive_abi():
+ parts = ['cp', VER_SUFFIX]
+ if sysconfig.get_config_var('Py_DEBUG'):
+ parts.append('d')
+ if sysconfig.get_config_var('WITH_PYMALLOC'):
+ parts.append('m')
+ if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+ parts.append('u')
+ return ''.join(parts)
+ ABI = _derive_abi()
+ del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+ to_posix = lambda o: o
+else:
+ to_posix = lambda o: o.replace(os.sep, '/')
+
+
+class Mounter(object):
+ def __init__(self):
+ self.impure_wheels = {}
+ self.libs = {}
+
+ def add(self, pathname, extensions):
+ self.impure_wheels[pathname] = extensions
+ self.libs.update(extensions)
+
+ def remove(self, pathname):
+ extensions = self.impure_wheels.pop(pathname)
+ for k, v in extensions:
+ if k in self.libs:
+ del self.libs[k]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.libs:
+ result = self
+ else:
+ result = None
+ return result
+
+ def load_module(self, fullname):
+ if fullname in sys.modules:
+ result = sys.modules[fullname]
+ else:
+ if fullname not in self.libs:
+ raise ImportError('unable to find extension for %s' % fullname)
+ result = imp.load_dynamic(fullname, self.libs[fullname])
+ result.__loader__ = self
+ parts = fullname.rsplit('.', 1)
+ if len(parts) > 1:
+ result.__package__ = parts[0]
+ return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+ """
+ Class to build and install from Wheel files (PEP 427).
+ """
+
+ wheel_version = (1, 1)
+ hash_kind = 'sha256'
+
+ def __init__(self, filename=None, sign=False, verify=False):
+ """
+ Initialise an instance using a (valid) filename.
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. + """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # if file_version < (1, 1): + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, + # LEGACY_METADATA_FILENAME] + # else: + # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] + fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME] + result = None + for fn in fns: + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + if result: + break + except KeyError: + pass + if not result: + raise ValueError('Invalid wheel, because metadata is ' + 'missing: looked in %s' % ', '.join(fns)) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) 
+ if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + records = list(records) # make a copy, as mutated + p = to_posix(os.path.relpath(record_path, base)) + records.append((p, '', '')) + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. 
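+ # Illustrative note: for a pure-Python project, paths['purelib'] might
+ # point at e.g. 'build/lib'; everything beneath it is archived at the
+ # wheel's top level, except .pyc/.pyo files and the .dist-info
+ # directory, which is handled separately below.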
+ path = libdir
+ distinfo = None
+ for root, dirs, files in os.walk(path):
+ if root == path:
+ # At the top level only, save distinfo for later
+ # and skip it for now
+ for i, dn in enumerate(dirs):
+ dn = fsdecode(dn)
+ if dn.endswith('.dist-info'):
+ distinfo = os.path.join(root, dn)
+ del dirs[i]
+ break
+ assert distinfo, '.dist-info directory expected, not found'
+
+ for fn in files:
+ # comment out next suite to leave .pyc files in
+ if fsdecode(fn).endswith(('.pyc', '.pyo')):
+ continue
+ p = os.path.join(root, fn)
+ rp = to_posix(os.path.relpath(p, path))
+ archive_paths.append((rp, p))
+
+ # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+ files = os.listdir(distinfo)
+ for fn in files:
+ if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+ p = fsdecode(os.path.join(distinfo, fn))
+ ap = to_posix(os.path.join(info_dir, fn))
+ archive_paths.append((ap, p))
+
+ wheel_metadata = [
+ 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+ 'Generator: distlib %s' % __version__,
+ 'Root-Is-Purelib: %s' % is_pure,
+ ]
+ for pyver, abi, arch in self.tags:
+ wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+ p = os.path.join(distinfo, 'WHEEL')
+ with open(p, 'w') as f:
+ f.write('\n'.join(wheel_metadata))
+ ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+ archive_paths.append((ap, p))
+
+ # sort the entries by archive path. Not needed by any spec, but it
+ # keeps the archive listing and RECORD tidier than they would otherwise
+ # be. Use the number of path segments to keep directory entries together,
+ # and keep the dist-info stuff at the end.
+ def sorter(t):
+ ap = t[0]
+ n = ap.count('/')
+ if '.dist-info' in ap:
+ n += 10000
+ return (n, ap)
+ archive_paths = sorted(archive_paths, key=sorter)
+
+ # Now, at last, RECORD.
+ # Paths in here are archive paths - nothing else makes sense.
+ self.write_records((distinfo, info_dir), libdir, archive_paths)
+ # Now, ready to build the zip file
+ pathname = os.path.join(self.dirname, self.filename)
+ self.build_zip(pathname, archive_paths)
+ return pathname
+
+ def skip_entry(self, arcname):
+ """
+ Determine whether an archive entry should be skipped when verifying
+ or installing.
+ """
+ # The signature file won't be in RECORD,
+ # and we don't currently do anything with it
+ # We also skip directories, as they won't be in RECORD
+ # either. See:
+ #
+ # https://github.com/pypa/wheel/issues/294
+ # https://github.com/pypa/wheel/issues/287
+ # https://github.com/pypa/wheel/pull/289
+ #
+ return arcname.endswith(('/', '/RECORD.jws'))
+
+ def install(self, paths, maker, **kwargs):
+ """
+ Install a wheel to the specified paths. If kwarg ``warner`` is
+ specified, it should be a callable, which will be called with two
+ tuples indicating the wheel version of this software and the wheel
+ version in the file, if there is a discrepancy in the versions.
+ This can be used to issue any warnings or raise any exceptions.
+ If kwarg ``lib_only`` is True, only the purelib/platlib files are
+ installed, and the headers, scripts, data and dist-info metadata are
+ not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+ bytecode will try to use file-hash based invalidation (PEP-552) on
+ supported interpreter versions (CPython 3.7+).
+
+ The return value is a :class:`InstalledDistribution` instance unless
+ ``lib_only`` is True, in which case the return value is ``None``.
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! + + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if self.skip_entry(u_arcname): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. + if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + # Issue #147: permission bits aren't preserved. Using + # zf.extract(zinfo, libdir) should have worked, but didn't, + # see https://www.thetopsites.net/article/53834422.shtml + # So ... 
manually preserve permission bits as given in zinfo + if os.name == 'posix': + # just set the normal permission bits + os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile, + hashed_invalidation=bc_hashed_invalidation) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. + commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' [%s]' % ','.join(v.flags) + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') + fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string 
to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + '%s.%s' % sys.version_info[:2]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' 
% pathname
+ raise DistlibException(msg)
+ if pathname in sys.path:
+ logger.debug('%s already in path', pathname)
+ else:
+ if append:
+ sys.path.append(pathname)
+ else:
+ sys.path.insert(0, pathname)
+ extensions = self._get_extensions()
+ if extensions:
+ if _hook not in sys.meta_path:
+ sys.meta_path.append(_hook)
+ _hook.add(pathname, extensions)
+
+ def unmount(self):
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+ if pathname not in sys.path:
+ logger.debug('%s not in path', pathname)
+ else:
+ sys.path.remove(pathname)
+ if pathname in _hook.impure_wheels:
+ _hook.remove(pathname)
+ if not _hook.impure_wheels:
+ if _hook in sys.meta_path:
+ sys.meta_path.remove(_hook)
+
+ def verify(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ # TODO version verification
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ # See issue #115: some wheels have .. in their entries, but
+ # in the filename ... e.g. __main__..py ! So the check is
+ # updated to look for .. in the directory portions
+ p = u_arcname.split('/')
+ if '..' in p:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+
+ if self.skip_entry(u_arcname):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ def update(self, modifier, dest_dir=None, **kwargs):
+ """
+ Update the contents of a wheel in a generic way. The modifier should
+ be a callable which expects a dictionary argument: its keys are
+ archive-entry paths, and its values are absolute filesystem paths
+ where the contents of the corresponding archive entries can be found. The
+ modifier is free to change the contents of the files pointed to, add
+ new entries and remove entries, before returning. This method will
+ extract the entire contents of the wheel to a temporary location, call
+ the modifier, and then use the passed (and possibly updated)
+ dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+ wheel is written there -- otherwise, the original wheel is overwritten.
+
+ The modifier should return True if it updated the wheel, else False.
+ This method returns the same value the modifier returns.
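+
+ For illustration: a no-op modifier would be ``lambda path_map: False``;
+ a modifier that rewrites one of the files named in ``path_map`` in
+ place and then returns True causes a new wheel to be built from the
+ updated contents.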
+ """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = path.endswith(LEGACY_METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. + current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def _get_glibc_version(): + import platform + ver = platform.libc_ver() + result = [] + if ver[0] == 'glibc': + for s in ver[1].split('.'): + result.append(int(s) if s.isdigit() else 0) + result = tuple(result) + return result + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. 
+ """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + # manylinux + if abi != 'none' and sys.platform.startswith('linux'): + arch = arch.replace('linux_', '') + parts = _get_glibc_version() + if len(parts) == 2: + if parts >= (2, 5): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux1_%s' % arch)) + if parts >= (2, 12): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2010_%s' % arch)) + if parts >= (2, 17): + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux2014_%s' % arch)) + result.append((''.join((IMP_PREFIX, versions[0])), abi, + 'manylinux_%s_%s_%s' % (parts[0], parts[1], + arch))) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/distro.py b/myenv/lib/python3.10/site-packages/pip/_vendor/distro.py new file mode 100644 index 000000000..789274134 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/distro.py @@ -0,0 +1,1386 @@ +# Copyright 2015,2016,2017 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The ``distro`` package (``distro`` stands for Linux Distribution) provides
+information about the Linux distribution it runs on, such as a reliable
+machine-readable distro ID, or version information.
+
+It is the recommended replacement for Python's original
+:py:func:`platform.linux_distribution` function, but it provides much more
+functionality. An alternative implementation became necessary because Python
+3.5 deprecated this function, and Python 3.8 removed it altogether. Its
+predecessor function :py:func:`platform.dist` was already deprecated since
+Python 2.6 and removed in Python 3.8. Still, there are many cases in which
+access to OS distribution information is needed. See `Python issue 1322
+<https://bugs.python.org/issue1322>`_ for more information.
+"""
+
+import argparse
+import json
+import logging
+import os
+import re
+import shlex
+import subprocess
+import sys
+import warnings
+
+__version__ = "1.6.0"
+
+# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
+# support, can use typing.TYPE_CHECKING instead. See:
+# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
+if False: # pragma: nocover
+ from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Optional,
+ Sequence,
+ TextIO,
+ Tuple,
+ Type,
+ TypedDict,
+ Union,
+ )
+
+ VersionDict = TypedDict(
+ "VersionDict", {"major": str, "minor": str, "build_number": str}
+ )
+ InfoDict = TypedDict(
+ "InfoDict",
+ {
+ "id": str,
+ "version": str,
+ "version_parts": VersionDict,
+ "like": str,
+ "codename": str,
+ },
+ )
+
+
+_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
+_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
+_OS_RELEASE_BASENAME = "os-release"
+
+#: Translation table for normalizing the "ID" attribute defined in os-release
+#: files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as defined in the os-release file, translated to lower case,
+#: with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_OS_ID = {
+ "ol": "oracle", # Oracle Linux
+}
+
+#: Translation table for normalizing the "Distributor ID" attribute returned by
+#: the lsb_release command, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as returned by the lsb_release command, translated to lower
+#: case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_LSB_ID = {
+ "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
+ "enterpriseenterpriseserver": "oracle", # Oracle Linux 5
+ "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
+ "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
+ "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
+}
+
+#: Translation table for normalizing the distro ID derived from the file name
+#: of distro release files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as derived from the file name of a distro release file,
+#: translated to lower case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_DISTRO_ID = {
+ "redhat": "rhel", # RHEL 6.x, 7.x
+}
+
+# Pattern for content of distro release file (reversed)
+_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
+ r"(?:[^)]*\)(.*)\()?
*(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", +) + + +def linux_distribution(full_distribution_name=True): + # type: (bool) -> Tuple[str, str, str] + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id(): + # type: () -> str + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. 
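+
+    For example, a hypothetical interactive session on an Ubuntu box (an
+    illustration by the editor, not guaranteed output)::
+
+        >>> from pip._vendor import distro
+        >>> distro.id()
+        'ubuntu'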
+
+    This package maintains the following reliable distro ID values:
+
+    ============== =========================================
+    Distro ID      Distribution
+    ============== =========================================
+    "ubuntu"       Ubuntu
+    "debian"       Debian
+    "rhel"         RedHat Enterprise Linux
+    "centos"       CentOS
+    "fedora"       Fedora
+    "sles"         SUSE Linux Enterprise Server
+    "opensuse"     openSUSE
+    "amazon"       Amazon Linux
+    "arch"         Arch Linux
+    "cloudlinux"   CloudLinux OS
+    "exherbo"      Exherbo Linux
+    "gentoo"       Gentoo Linux
+    "ibm_powerkvm" IBM PowerKVM
+    "kvmibm"       KVM for IBM z Systems
+    "linuxmint"    Linux Mint
+    "mageia"       Mageia
+    "mandriva"     Mandriva Linux
+    "parallels"    Parallels
+    "pidora"       Pidora
+    "raspbian"     Raspbian
+    "oracle"       Oracle Linux (and Oracle Enterprise Linux)
+    "scientific"   Scientific Linux
+    "slackware"    Slackware
+    "xenserver"    XenServer
+    "openbsd"      OpenBSD
+    "netbsd"       NetBSD
+    "freebsd"      FreeBSD
+    "midnightbsd"  MidnightBSD
+    ============== =========================================
+
+    If you need additional distros with reliable IDs added to this set,
+    or if you find that the :func:`distro.id` function returns a different
+    distro ID for one of the listed distros, please create an issue in the
+    `distro issue tracker`_.
+
+    **Lookup hierarchy and transformations:**
+
+    First, the ID is obtained from the following sources, in the specified
+    order. The first available and non-empty value is used:
+
+    * the value of the "ID" attribute of the os-release file,
+
+    * the value of the "Distributor ID" attribute returned by the lsb_release
+      command,
+
+    * the first part of the file name of the distro release file.
+
+    The ID value determined this way then passes through the following
+    transformations before it is returned by this method:
+
+    * it is translated to lower case,
+
+    * blanks (which should not be there anyway) are translated to underscores,
+
+    * a normalization of the ID is performed, based upon
+      `normalization tables`_. The purpose of this normalization is to ensure
+      that the ID is as reliable as possible, even across incompatible changes
+      in the OS distributions. A common reason for an incompatible change is
+      the addition of an os-release file, or the addition of the lsb_release
+      command, with ID values that differ from what was previously determined
+      from the distro release file name.
+    """
+    return _distro.id()
+
+
+def name(pretty=False):
+    # type: (bool) -> str
+    """
+    Return the name of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the name is returned without version or codename.
+    (e.g. "CentOS Linux")
+
+    If *pretty* is true, the version and codename are appended.
+    (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+    **Lookup hierarchy:**
+
+    The name is obtained from the following sources, in the specified order.
+    The first available and non-empty value is used:
+
+    * If *pretty* is false:
+
+      - the value of the "NAME" attribute of the os-release file,
+
+      - the value of the "Distributor ID" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file.
+
+    * If *pretty* is true:
+
+      - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+      - the value of the "Description" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file, appended
+        with the value of the pretty version ("<version_id>" and "<codename>"
+        fields) of the distro release file, if available.
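+
+    A hypothetical session reusing the examples above (illustrative output
+    only)::
+
+        >>> from pip._vendor import distro
+        >>> distro.name()
+        'CentOS Linux'
+        >>> distro.name(pretty=True)
+        'CentOS Linux 7.1.1503 (Core)'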
+    """
+    return _distro.name(pretty)
+
+
+def version(pretty=False, best=False):
+    # type: (bool, bool) -> str
+    """
+    Return the version of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the version is returned without codename (e.g.
+    "7.0").
+
+    If *pretty* is true, the codename in parentheses is appended, if the
+    codename is non-empty (e.g. "7.0 (Maipo)").
+
+    Some distributions provide version numbers with different precisions in
+    the different sources of distribution information. Examining the different
+    sources in a fixed priority order does not always yield the most precise
+    version (e.g. for Debian 8.2, or CentOS 7.1).
+
+    The *best* parameter can be used to control the approach for the returned
+    version:
+
+    If *best* is false, the first non-empty version number in priority order of
+    the examined sources is returned.
+
+    If *best* is true, the most precise version number out of all examined
+    sources is returned.
+
+    **Lookup hierarchy:**
+
+    In all cases, the version number is obtained from the following sources.
+    If *best* is false, this order represents the priority order:
+
+    * the value of the "VERSION_ID" attribute of the os-release file,
+    * the value of the "Release" attribute returned by the lsb_release
+      command,
+    * the version number parsed from the "<version_id>" field of the first line
+      of the distro release file,
+    * the version number parsed from the "PRETTY_NAME" attribute of the
+      os-release file, if it follows the format of the distro release files.
+    * the version number parsed from the "Description" attribute returned by
+      the lsb_release command, if it follows the format of the distro release
+      files.
+    """
+    return _distro.version(pretty, best)
+
+
+def version_parts(best=False):
+    # type: (bool) -> Tuple[str, str, str]
+    """
+    Return the version of the current OS distribution as a tuple
+    ``(major, minor, build_number)`` with items as follows:
+
+    * ``major``: The result of :func:`distro.major_version`.
+
+    * ``minor``: The result of :func:`distro.minor_version`.
+
+    * ``build_number``: The result of :func:`distro.build_number`.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.version_parts(best)
+
+
+def major_version(best=False):
+    # type: (bool) -> str
+    """
+    Return the major version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The major version is the first
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.major_version(best)
+
+
+def minor_version(best=False):
+    # type: (bool) -> str
+    """
+    Return the minor version of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The minor version is the second
+    part of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.minor_version(best)
+
+
+def build_number(best=False):
+    # type: (bool) -> str
+    """
+    Return the build number of the current OS distribution, as a string,
+    if provided.
+    Otherwise, the empty string is returned. The build number is the third part
+    of the dot-separated version string.
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
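+
+    For example, for a dot-separated version string of "7.1.1503" the build
+    number would be "1503". A hypothetical session (illustrative output)::
+
+        >>> from pip._vendor import distro
+        >>> distro.build_number()
+        '1503'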
+    """
+    return _distro.build_number(best)
+
+
+def like():
+    # type: () -> str
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution with regard to packaging
+    and programming interfaces, for example, distributions the current
+    distribution is a derivative of.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename():
+    # type: () -> str
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "<codename>" field of the distro release file.
+    """
+    return _distro.codename()
+
+
+def info(pretty=False, best=False):
+    # type: (bool, bool) -> InfoDict
+    """
+    Return certain machine-readable information items about the current OS
+    distribution in a dictionary, as shown in the following example:
+
+    .. sourcecode:: python
+
+        {
+            'id': 'rhel',
+            'version': '7.0',
+            'version_parts': {
+                'major': '7',
+                'minor': '0',
+                'build_number': ''
+            },
+            'like': 'fedora',
+            'codename': 'Maipo'
+        }
+
+    The dictionary structure and keys are always the same, regardless of which
+    information items are available in the underlying data sources. The values
+    for the various keys are as follows:
+
+    * ``id``: The result of :func:`distro.id`.
+
+    * ``version``: The result of :func:`distro.version`.
+
+    * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+    * ``version_parts -> build_number``: The result of
+      :func:`distro.build_number`.
+
+    * ``like``: The result of :func:`distro.like`.
+
+    * ``codename``: The result of :func:`distro.codename`.
+
+    For a description of the *pretty* and *best* parameters, see the
+    :func:`distro.version` method.
+    """
+    return _distro.info(pretty, best)
+
+
+def os_release_info():
+    # type: () -> Dict[str, str]
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the os-release file data source of the current OS distribution.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_info()
+
+
+def lsb_release_info():
+    # type: () -> Dict[str, str]
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the lsb_release command data source of the current OS distribution.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_info()
+
+
+def distro_release_info():
+    # type: () -> Dict[str, str]
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the distro release file data source of the current OS distribution.
+
+    See `distro release file`_ for details about these information items.
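+
+    A hypothetical result for a RHEL-style distro release file (an
+    illustration by the editor; the exact keys depend on the file)::
+
+        >>> from pip._vendor import distro
+        >>> distro.distro_release_info()
+        {'id': 'rhel', 'name': 'Red Hat Enterprise Linux Server',
+         'version_id': '7.0', 'codename': 'Maipo'}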
+    """
+    return _distro.distro_release_info()
+
+
+def uname_info():
+    # type: () -> Dict[str, str]
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the uname command data source of the current OS distribution.
+    """
+    return _distro.uname_info()
+
+
+def os_release_attr(attribute):
+    # type: (str) -> str
+    """
+    Return a single named information item from the os-release file data source
+    of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_attr(attribute)
+
+
+def lsb_release_attr(attribute):
+    # type: (str) -> str
+    """
+    Return a single named information item from the lsb_release command output
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_attr(attribute)
+
+
+def distro_release_attr(attribute):
+    # type: (str) -> str
+    """
+    Return a single named information item from the distro release file
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+
+    See `distro release file`_ for details about these information items.
+    """
+    return _distro.distro_release_attr(attribute)
+
+
+def uname_attr(attribute):
+    # type: (str) -> str
+    """
+    Return a single named information item from the uname command output
+    data source of the current OS distribution.
+
+    Parameters:
+
+    * ``attribute`` (string): Key of the information item.
+
+    Returns:
+
+    * (string): Value of the information item, if the item exists.
+      The empty string, if the item does not exist.
+    """
+    return _distro.uname_attr(attribute)
+
+
+try:
+    from functools import cached_property
+except ImportError:
+    # Python < 3.8
+    class cached_property(object):  # type: ignore
+        """A version of @property which caches the value. On access, it calls the
+        underlying function and sets the value in `__dict__` so future accesses
+        will not re-call the property.
+        """
+
+        def __init__(self, f):
+            # type: (Callable[[Any], Any]) -> None
+            self._fname = f.__name__
+            self._f = f
+
+        def __get__(self, obj, owner):
+            # type: (Any, Type[Any]) -> Any
+            assert obj is not None, "call {} on an instance".format(self._fname)
+            ret = obj.__dict__[self._fname] = self._f(obj)
+            return ret
+
+
+class LinuxDistribution(object):
+    """
+    Provides information about an OS distribution.
+
+    This package creates a private module-global instance of this class with
+    default initialization arguments, which is used by the
+    `consolidated accessor functions`_ and `single source accessor functions`_.
+    By using default initialization arguments, that module-global instance
+    returns data about the current OS distribution (i.e. the distro this
+    package runs on).
+
+    Normally, it is not necessary to create additional instances of this class.
+ However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb=True, + os_release_file="", + distro_release_file="", + include_uname=True, + root_dir=None, + ): + # type: (bool, str, str, bool, Optional[str]) -> None + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + Raises: + + * :py:exc:`IOError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had + some issue (other than not being available in the program execution + path). + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. 
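+
+        A minimal usage sketch (the variable name and root path are
+        hypothetical)::
+
+            dist = LinuxDistribution(
+                include_lsb=False,
+                include_uname=False,
+                root_dir="/mnt/sysroot",
+            )
+            print(dist.id(), dist.version())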
+ """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. + if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + self.include_lsb = include_lsb + self.include_uname = include_uname + + def __repr__(self): + # type: () -> str + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r})".format(self=self) + ) + + def linux_distribution(self, full_distribution_name=True): + # type: (bool) -> Tuple[str, str, str] + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self.codename(), + ) + + def id(self): + # type: () -> str + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. + """ + + def normalize(distro_id, table): + # type: (str, Dict[str, str]) -> str + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty=False): + # type: (bool) -> str + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = name + " " + version + return name or "" + + def version(self, pretty=False, best=False): + # type: (bool, bool) -> str + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. 
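+
+        A hypothetical call combining both flags (illustrative output,
+        reusing the "7.0 (Maipo)" example from :func:`distro.version`)::
+
+            >>> LinuxDistribution().version(pretty=True, best=True)
+            '7.0 (Maipo)'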
+ """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = "{0} ({1})".format(version, self.codename()) + return version + + def version_parts(self, best=False): + # type: (bool) -> Tuple[str, str, str] + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best=False): + # type: (bool) -> str + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best=False): + # type: (bool) -> str + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. + """ + return self.version_parts(best)[1] + + def build_number(self, best=False): + # type: (bool) -> str + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self): + # type: () -> str + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self): + # type: () -> str + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty=False, best=False): + # type: (bool, bool) -> InfoDict + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return dict( + id=self.id(), + version=self.version(pretty, best), + version_parts=dict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self): + # type: () -> Dict[str, str] + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. 
+        """
+        return self._os_release_info
+
+    def lsb_release_info(self):
+        # type: () -> Dict[str, str]
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the lsb_release command data source of the OS
+        distribution.
+
+        For details, see :func:`distro.lsb_release_info`.
+        """
+        return self._lsb_release_info
+
+    def distro_release_info(self):
+        # type: () -> Dict[str, str]
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the distro release file data source of the OS
+        distribution.
+
+        For details, see :func:`distro.distro_release_info`.
+        """
+        return self._distro_release_info
+
+    def uname_info(self):
+        # type: () -> Dict[str, str]
+        """
+        Return a dictionary containing key-value pairs for the information
+        items from the uname command data source of the OS distribution.
+
+        For details, see :func:`distro.uname_info`.
+        """
+        return self._uname_info
+
+    def os_release_attr(self, attribute):
+        # type: (str) -> str
+        """
+        Return a single named information item from the os-release file data
+        source of the OS distribution.
+
+        For details, see :func:`distro.os_release_attr`.
+        """
+        return self._os_release_info.get(attribute, "")
+
+    def lsb_release_attr(self, attribute):
+        # type: (str) -> str
+        """
+        Return a single named information item from the lsb_release command
+        output data source of the OS distribution.
+
+        For details, see :func:`distro.lsb_release_attr`.
+        """
+        return self._lsb_release_info.get(attribute, "")
+
+    def distro_release_attr(self, attribute):
+        # type: (str) -> str
+        """
+        Return a single named information item from the distro release file
+        data source of the OS distribution.
+
+        For details, see :func:`distro.distro_release_attr`.
+        """
+        return self._distro_release_info.get(attribute, "")
+
+    def uname_attr(self, attribute):
+        # type: (str) -> str
+        """
+        Return a single named information item from the uname command
+        output data source of the OS distribution.
+
+        For details, see :func:`distro.uname_attr`.
+        """
+        return self._uname_info.get(attribute, "")
+
+    @cached_property
+    def _os_release_info(self):
+        # type: () -> Dict[str, str]
+        """
+        Get the information items from the specified os-release file.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        if os.path.isfile(self.os_release_file):
+            with open(self.os_release_file) as release_file:
+                return self._parse_os_release_content(release_file)
+        return {}
+
+    @staticmethod
+    def _parse_os_release_content(lines):
+        # type: (TextIO) -> Dict[str, str]
+        """
+        Parse the lines of an os-release file.
+
+        Parameters:
+
+        * lines: Iterable through the lines in the os-release file.
+                 Each line must be a unicode string or a UTF-8 encoded byte
+                 string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        props = {}
+        lexer = shlex.shlex(lines, posix=True)
+        lexer.whitespace_split = True
+
+        # The shlex module defines its `wordchars` variable using literals,
+        # making it dependent on the encoding of the Python source file.
+        # In Python 2.6 and 2.7, the shlex source file is encoded in
+        # 'iso-8859-1', and the `wordchars` variable is defined as a byte
+        # string. This causes a UnicodeDecodeError to be raised when the
+        # parsed content is a unicode object. The following fix resolves that
+        # (... but it should be fixed in shlex...):
+        if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
+            lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
+
+        tokens = list(lexer)
+        for token in tokens:
+            # At this point, all shell-like parsing has been done (i.e.
+            # comments processed, quotes and backslash escape sequences
+            # processed, multi-line values assembled, trailing newlines
+            # stripped, etc.), so the tokens are now either:
+            # * variable assignments: var=value
+            # * commands or their arguments (not allowed in os-release)
+            if "=" in token:
+                k, v = token.split("=", 1)
+                props[k.lower()] = v
+            else:
+                # Ignore any tokens that are not variable assignments
+                pass
+
+        if "version_codename" in props:
+            # os-release added a version_codename field. Use that in
+            # preference to anything else. Note that some distros purposefully
+            # do not have code names. They should be setting
+            # version_codename=""
+            props["codename"] = props["version_codename"]
+        elif "ubuntu_codename" in props:
+            # Same as above but a non-standard field name used on older Ubuntus
+            props["codename"] = props["ubuntu_codename"]
+        elif "version" in props:
+            # If there is no version_codename, parse it from the version
+            match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
+            if match:
+                codename = match.group()
+                codename = codename.strip("()")
+                codename = codename.strip(",")
+                codename = codename.strip()
+                # codename appears within parentheses.
+                props["codename"] = codename
+
+        return props
+
+    @cached_property
+    def _lsb_release_info(self):
+        # type: () -> Dict[str, str]
+        """
+        Get the information items from the lsb_release command output.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        if not self.include_lsb:
+            return {}
+        with open(os.devnull, "wb") as devnull:
+            try:
+                cmd = ("lsb_release", "-a")
+                stdout = subprocess.check_output(cmd, stderr=devnull)
+            # Command not found or lsb_release returned error
+            except (OSError, subprocess.CalledProcessError):
+                return {}
+        content = self._to_str(stdout).splitlines()
+        return self._parse_lsb_release_content(content)
+
+    @staticmethod
+    def _parse_lsb_release_content(lines):
+        # type: (Iterable[str]) -> Dict[str, str]
+        """
+        Parse the output of the lsb_release command.
+
+        Parameters:
+
+        * lines: Iterable through the lines of the lsb_release output.
+                 Each line must be a unicode string or a UTF-8 encoded byte
+                 string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        props = {}
+        for line in lines:
+            kv = line.strip("\n").split(":", 1)
+            if len(kv) != 2:
+                # Ignore lines without colon.
+                continue
+            k, v = kv
+            props.update({k.replace(" ", "_").lower(): v.strip()})
+        return props
+
+    @cached_property
+    def _uname_info(self):
+        # type: () -> Dict[str, str]
+        with open(os.devnull, "wb") as devnull:
+            try:
+                cmd = ("uname", "-rs")
+                stdout = subprocess.check_output(cmd, stderr=devnull)
+            except OSError:
+                return {}
+        content = self._to_str(stdout).splitlines()
+        return self._parse_uname_content(content)
+
+    @staticmethod
+    def _parse_uname_content(lines):
+        # type: (Sequence[str]) -> Dict[str, str]
+        props = {}
+        match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
+        if match:
+            name, version = match.groups()
+
+            # This is to prevent the Linux kernel version from
+            # appearing as the 'best' version on otherwise
+            # identifiable distributions.
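+            # For example (an editor's illustration), `uname -rs` output
+            # such as "Linux 5.4.0" names the kernel, not the distribution,
+            # so it is discarded here rather than letting the kernel
+            # release masquerade as a distro version.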
+ if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(text): + # type: (Union[bytes, str]) -> str + encoding = sys.getfilesystemencoding() + encoding = "utf-8" if encoding == "ascii" else encoding + + if sys.version_info[0] >= 3: + if isinstance(text, bytes): + return text.decode(encoding) + else: + if isinstance(text, unicode): # noqa + return text.encode(encoding) + + return text + + @cached_property + def _distro_release_info(self): + # type: () -> Dict[str, str] + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + elif match: + distro_info["id"] = match.group(1) + return distro_info + else: + try: + basenames = os.listdir(self.etc_dir) + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "sl-release", + "slackware-version", + ] + for basename in basenames: + if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: + continue + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match: + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + if "name" in distro_info: + # The name is always present if the pattern matches + self.distro_release_file = filepath + distro_info["id"] = match.group(1) + if "cloudlinux" in distro_info["name"].lower(): + distro_info["id"] = "cloudlinux" + return distro_info + return {} + + def _parse_distro_release_file(self, filepath): + # type: (str) -> Dict[str, str] + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath) as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except (OSError, IOError): + # Ignore not being able to read a specific, seemingly version + # related file. 
+            # See https://github.com/python-distro/distro/issues/162
+            return {}
+
+    @staticmethod
+    def _parse_distro_release_content(line):
+        # type: (str) -> Dict[str, str]
+        """
+        Parse a line from a distro release file.
+
+        Parameters:
+        * line: Line from the distro release file. Must be a unicode string
+                or a UTF-8 encoded byte string.
+
+        Returns:
+            A dictionary containing all information items.
+        """
+        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
+        distro_info = {}
+        if matches:
+            # regexp ensures non-None
+            distro_info["name"] = matches.group(3)[::-1]
+            if matches.group(2):
+                distro_info["version_id"] = matches.group(2)[::-1]
+            if matches.group(1):
+                distro_info["codename"] = matches.group(1)[::-1]
+        elif line:
+            distro_info["name"] = line.strip()
+        return distro_info
+
+
+_distro = LinuxDistribution()
+
+
+def main():
+    # type: () -> None
+    logger = logging.getLogger(__name__)
+    logger.setLevel(logging.DEBUG)
+    logger.addHandler(logging.StreamHandler(sys.stdout))
+
+    parser = argparse.ArgumentParser(description="OS distro info tool")
+    parser.add_argument(
+        "--json", "-j", help="Output in machine readable format", action="store_true"
+    )
+
+    parser.add_argument(
+        "--root-dir",
+        "-r",
+        type=str,
+        dest="root_dir",
+        help="Path to the root filesystem directory (defaults to /)",
+    )
+
+    args = parser.parse_args()
+
+    if args.root_dir:
+        dist = LinuxDistribution(
+            include_lsb=False, include_uname=False, root_dir=args.root_dir
+        )
+    else:
+        dist = _distro
+
+    if args.json:
+        logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
+    else:
+        logger.info("Name: %s", dist.name(pretty=True))
+        distribution_version = dist.version(pretty=True)
+        logger.info("Version: %s", distribution_version)
+        distribution_codename = dist.codename()
+        logger.info("Codename: %s", distribution_codename)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__init__.py
new file mode 100644
index 000000000..d1d82f157
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__init__.py
@@ -0,0 +1,35 @@
+"""
+HTML parsing library based on the `WHATWG HTML specification
+<https://whatwg.org/html>`_. The parser is designed to be compatible with
+existing HTML found in the wild and implements well-defined error recovery that
+is largely compatible with modern desktop web browsers.
+
+Example usage::
+
+    from pip._vendor import html5lib
+    with open("my_document.html", "rb") as f:
+        tree = html5lib.parse(f)
+
+For convenience, this module re-exports the following names:
+
+* :func:`~.html5parser.parse`
+* :func:`~.html5parser.parseFragment`
+* :class:`~.html5parser.HTMLParser`
+* :func:`~.treebuilders.getTreeBuilder`
+* :func:`~.treewalkers.getTreeWalker`
+* :func:`~.serializer.serialize`
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .html5parser import HTMLParser, parse, parseFragment
+from .treebuilders import getTreeBuilder
+from .treewalkers import getTreeWalker
+from .serializer import serialize
+
+__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
+           "getTreeWalker", "serialize"]
+
+# this has to be at the top level, see how setup.py parses this
+#: Distribution version number.
+__version__ = "1.1" diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..df03058f2 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-310.pyc new file mode 100644 index 000000000..82d8c47e8 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-310.pyc new file mode 100644 index 000000000..b8d355f5b Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-310.pyc new file mode 100644 index 000000000..b697bd18e Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 000000000..413ab9704 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc new file mode 100644 index 000000000..c24da6d43 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-310.pyc new file mode 100644 index 000000000..2f02a23d5 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-310.pyc new file mode 100644 index 000000000..0df6aeb31 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_ihatexml.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_ihatexml.py new file mode 100644 index 000000000..3ff803c19 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_ihatexml.py @@ -0,0 +1,289 @@ +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings + +from .constants import DataLossWarning + +baseChar = """ 
+[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | +[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | +[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | +[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | +[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | +[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | +[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | +[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | +[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | +[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | +[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | +[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | +[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | +[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | +[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | +[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | +[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | +[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | +[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | +[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | +[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | +[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | +[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | +[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | +[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | +[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | +[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | +[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | +[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | +[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | +#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | +#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | +#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | +[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | +[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | +#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | +[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | +[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | +[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | +[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | +[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | +#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | +[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | +[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | +[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | +[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" + +ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" + +combiningCharacter = """ +[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | +[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | +[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | +[#x06E0-#x06E4] | [#x06E7-#x06E8] 
| [#x06EA-#x06ED] | [#x0901-#x0903] | +#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | +[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | +[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | +#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | +[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | +[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | +#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | +[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | +[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | +[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | +[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | +[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | +#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | +[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | +#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | +[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | +[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | +#x3099 | #x309A""" + +digit = """ +[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | +[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | +[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | +[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" + +extender = """ +#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | +#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" + +letter = " | ".join([baseChar, ideographic]) + +# Without the +name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, + extender]) +nameFirst = " | ".join([letter, "_"]) + +reChar = re.compile(r"#x([\d|A-F]{4,4})") +reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]") + + +def charStringToList(chars): + charRanges = [item.strip() for item in chars.split(" | ")] + rv = [] + for item in charRanges: + foundMatch = False + for regexp in (reChar, reCharRange): + match = regexp.match(item) + if match is not None: + rv.append([hexToInt(item) for item in match.groups()]) + if len(rv[-1]) == 1: + rv[-1] = rv[-1] * 2 + foundMatch = True + break + if not foundMatch: + assert len(item) == 1 + + rv.append([ord(item)] * 2) + rv = normaliseCharList(rv) + return rv + + +def normaliseCharList(charList): + charList = sorted(charList) + for item in charList: + assert item[1] >= item[0] + rv = [] + i = 0 + while i < len(charList): + j = 1 + rv.append(charList[i]) + while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: + rv[-1][1] = charList[i + j][1] + j += 1 + i += j + return rv + + +# We don't really support characters above the BMP :( +max_unicode = int("FFFF", 16) + + +def missingRanges(charList): + rv = [] + if charList[0] != 0: + rv.append([0, charList[0][0] - 1]) + for i, item in enumerate(charList[:-1]): + rv.append([item[1] + 1, charList[i + 1][0] - 1]) + if charList[-1][1] != max_unicode: + rv.append([charList[-1][1] + 1, max_unicode]) + return rv + + +def listToRegexpStr(charList): + rv = [] + for item in charList: + if item[0] == item[1]: + rv.append(escapeRegexp(chr(item[0]))) + else: + rv.append(escapeRegexp(chr(item[0])) + "-" + + escapeRegexp(chr(item[1]))) + return "[%s]" % "".join(rv) + + +def hexToInt(hex_str): + return int(hex_str, 16) + + +def escapeRegexp(string): + specialCharacters = (".", "^", "$", "*", 
"+", "?", "{", "}", + "[", "]", "|", "(", ")", "-") + for char in specialCharacters: + string = string.replace(char, "\\" + char) + + return string + +# output from the above +nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +nonXmlNameFirstBMPRegexp = 
re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa + +# Simpler things +nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]") + + +class InfosetFilter(object): + replacementRegexp = re.compile(r"U[\dA-F]{5,5}") + + def __init__(self, + dropXmlnsLocalName=False, + dropXmlnsAttrNs=False, + preventDoubleDashComments=False, + preventDashAtCommentEnd=False, + replaceFormFeedCharacters=True, + preventSingleQuotePubid=False): + + self.dropXmlnsLocalName = dropXmlnsLocalName + self.dropXmlnsAttrNs = dropXmlnsAttrNs + + self.preventDoubleDashComments = preventDoubleDashComments + self.preventDashAtCommentEnd = preventDashAtCommentEnd + + self.replaceFormFeedCharacters = replaceFormFeedCharacters + + self.preventSingleQuotePubid = preventSingleQuotePubid + + self.replaceCache = {} + + def coerceAttribute(self, name, namespace=None): + if self.dropXmlnsLocalName and name.startswith("xmlns:"): + warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) + return None + elif (self.dropXmlnsAttrNs and + namespace == "http://www.w3.org/2000/xmlns/"): + warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) + return None + else: + return self.toXmlName(name) + + def coerceElement(self, name): + return self.toXmlName(name) + + def coerceComment(self, data): + if self.preventDoubleDashComments: + while "--" in data: + warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) + data = data.replace("--", "- -") + if data.endswith("-"): + warnings.warn("Comments cannot end in 
a dash", DataLossWarning) + data += " " + return data + + def coerceCharacters(self, data): + if self.replaceFormFeedCharacters: + for _ in range(data.count("\x0C")): + warnings.warn("Text cannot contain U+000C", DataLossWarning) + data = data.replace("\x0C", " ") + # Other non-xml characters + return data + + def coercePubid(self, data): + dataOutput = data + for char in nonPubidCharRegexp.findall(data): + warnings.warn("Coercing non-XML pubid", DataLossWarning) + replacement = self.getReplacementCharacter(char) + dataOutput = dataOutput.replace(char, replacement) + if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: + warnings.warn("Pubid cannot contain single quote", DataLossWarning) + dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) + return dataOutput + + def toXmlName(self, name): + nameFirst = name[0] + nameRest = name[1:] + m = nonXmlNameFirstBMPRegexp.match(nameFirst) + if m: + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) + nameFirstOutput = self.getReplacementCharacter(nameFirst) + else: + nameFirstOutput = nameFirst + + nameRestOutput = nameRest + replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) + for char in replaceChars: + warnings.warn("Coercing non-XML name: %s" % name, DataLossWarning) + replacement = self.getReplacementCharacter(char) + nameRestOutput = nameRestOutput.replace(char, replacement) + return nameFirstOutput + nameRestOutput + + def getReplacementCharacter(self, char): + if char in self.replaceCache: + replacement = self.replaceCache[char] + else: + replacement = self.escapeChar(char) + return replacement + + def fromXmlName(self, name): + for item in set(self.replacementRegexp.findall(name)): + name = name.replace(item, self.unescapeChar(item)) + return name + + def escapeChar(self, char): + replacement = "U%05X" % ord(char) + self.replaceCache[char] = replacement + return replacement + + def unescapeChar(self, charcode): + return chr(int(charcode[1:], 16)) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_inputstream.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_inputstream.py new file mode 100644 index 000000000..e0bb37602 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_inputstream.py @@ -0,0 +1,918 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import text_type +from pip._vendor.six.moves import http_client, urllib + +import codecs +import re +from io import BytesIO, StringIO + +from pip._vendor import webencodings + +from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase +from .constants import _ReparseException +from . 
import _utils + +# Non-unicode versions of constants for use in the pre-parser +spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) +asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) +asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) +spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) + + +invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa + +if _utils.supports_lone_surrogates: + # Use one extra step of indirection and create surrogates with + # eval. Not using this indirection would introduce an illegal + # unicode literal on platforms not supporting such lone + # surrogates. + assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + + eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used + "]") +else: + invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) + +non_bmp_invalid_codepoints = {0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, + 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, + 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, + 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, + 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, + 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, + 0x10FFFE, 0x10FFFF} + +ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") + +# Cache for charsUntil() +charsUntilRegEx = {} + + +class BufferedStream(object): + """Buffering for streams that do not have buffering of their own + + The buffer is implemented as a list of chunks on the assumption that + joining many strings will be slow since it is O(n**2) + """ + + def __init__(self, stream): + self.stream = stream + self.buffer = [] + self.position = [-1, 0] # chunk number, offset + + def tell(self): + pos = 0 + for chunk in self.buffer[:self.position[0]]: + pos += len(chunk) + pos += self.position[1] + return pos + + def seek(self, pos): + assert pos <= self._bufferedBytes() + offset = pos + i = 0 + while len(self.buffer[i]) < offset: + offset -= len(self.buffer[i]) + i += 1 + self.position = [i, offset] + + def read(self, bytes): + if not self.buffer: + return self._readStream(bytes) + elif (self.position[0] == len(self.buffer) and + self.position[1] == len(self.buffer[-1])): + return self._readStream(bytes) + else: + return self._readFromBuffer(bytes) + + def _bufferedBytes(self): + return sum([len(item) for item in self.buffer]) + + def _readStream(self, bytes): + data = self.stream.read(bytes) + self.buffer.append(data) + self.position[0] += 1 + self.position[1] = len(data) + return data + + def _readFromBuffer(self, bytes): + remainingBytes = bytes + rv = [] + bufferIndex = self.position[0] + bufferOffset = self.position[1] + while bufferIndex < len(self.buffer) and remainingBytes != 0: + assert remainingBytes > 0 + bufferedData = self.buffer[bufferIndex] + + if remainingBytes <= len(bufferedData) - bufferOffset: + bytesToRead = remainingBytes + self.position = [bufferIndex, bufferOffset + bytesToRead] + else: + bytesToRead = len(bufferedData) - bufferOffset + 
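# Partial read: take the remainder of this chunk here; the next loop iteration advances bufferIndex and continues reading from the following chunk. +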
self.position = [bufferIndex, len(bufferedData)] + bufferIndex += 1 + rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) + remainingBytes -= bytesToRead + + bufferOffset = 0 + + if remainingBytes: + rv.append(self._readStream(remainingBytes)) + + return b"".join(rv) + + +def HTMLInputStream(source, **kwargs): + # Work around Python bug #20007: read(0) closes the connection. + # http://bugs.python.org/issue20007 + if (isinstance(source, http_client.HTTPResponse) or + # Also check for addinfourl wrapping HTTPResponse + (isinstance(source, urllib.response.addbase) and + isinstance(source.fp, http_client.HTTPResponse))): + isUnicode = False + elif hasattr(source, "read"): + isUnicode = isinstance(source.read(0), text_type) + else: + isUnicode = isinstance(source, text_type) + + if isUnicode: + encodings = [x for x in kwargs if x.endswith("_encoding")] + if encodings: + raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) + + return HTMLUnicodeInputStream(source, **kwargs) + else: + return HTMLBinaryInputStream(source, **kwargs) + + +class HTMLUnicodeInputStream(object): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + _defaultChunkSize = 10240 + + def __init__(self, source): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + + if not _utils.supports_lone_surrogates: + # Such platforms will have already checked for such + # surrogate errors, so no need to do this checking. + self.reportCharacterErrors = None + elif len("\U0010FFFF") == 1: + self.reportCharacterErrors = self.characterErrorsUCS4 + else: + self.reportCharacterErrors = self.characterErrorsUCS2 + + # List of where new lines occur + self.newLines = [0] + + self.charEncoding = (lookupEncoding("utf-8"), "certain") + self.dataStream = self.openStream(source) + + self.reset() + + def reset(self): + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + self.errors = [] + + # number of (complete) lines in previous chunks + self.prevNumLines = 0 + # number of columns in the last line of the previous chunk + self.prevNumCols = 0 + + # Deal with CR LF and surrogates split over chunk boundaries + self._bufferedCharacter = None + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. 
+ + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = StringIO(source) + + return stream + + def _position(self, offset): + chunk = self.chunk + nLines = chunk.count('\n', 0, offset) + positionLine = self.prevNumLines + nLines + lastLinePos = chunk.rfind('\n', 0, offset) + if lastLinePos == -1: + positionColumn = self.prevNumCols + offset + else: + positionColumn = offset - (lastLinePos + 1) + return (positionLine, positionColumn) + + def position(self): + """Returns (line, col) of the current position in the stream.""" + line, col = self._position(self.chunkOffset) + return (line + 1, col) + + def char(self): + """ Read one character from the stream or queue if available. Return + EOF when EOF is reached. + """ + # Read a new chunk from the input stream if necessary + if self.chunkOffset >= self.chunkSize: + if not self.readChunk(): + return EOF + + chunkOffset = self.chunkOffset + char = self.chunk[chunkOffset] + self.chunkOffset = chunkOffset + 1 + + return char + + def readChunk(self, chunkSize=None): + if chunkSize is None: + chunkSize = self._defaultChunkSize + + self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) + + self.chunk = "" + self.chunkSize = 0 + self.chunkOffset = 0 + + data = self.dataStream.read(chunkSize) + + # Deal with CR LF and surrogates broken across chunks + if self._bufferedCharacter: + data = self._bufferedCharacter + data + self._bufferedCharacter = None + elif not data: + # We have no more data, bye-bye stream + return False + + if len(data) > 1: + lastv = ord(data[-1]) + if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: + self._bufferedCharacter = data[-1] + data = data[:-1] + + if self.reportCharacterErrors: + self.reportCharacterErrors(data) + + # Replace invalid characters + data = data.replace("\r\n", "\n") + data = data.replace("\r", "\n") + + self.chunk = data + self.chunkSize = len(data) + + return True + + def characterErrorsUCS4(self, data): + for _ in range(len(invalid_unicode_re.findall(data))): + self.errors.append("invalid-codepoint") + + def characterErrorsUCS2(self, data): + # Someone picked the wrong compile option + # You lose + skip = False + for match in invalid_unicode_re.finditer(data): + if skip: + continue + codepoint = ord(match.group()) + pos = match.start() + # Pretty sure there should be endianness issues here + if _utils.isSurrogatePair(data[pos:pos + 2]): + # We have a surrogate pair! + char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) + if char_val in non_bmp_invalid_codepoints: + self.errors.append("invalid-codepoint") + skip = True + elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and + pos == len(data) - 1): + self.errors.append("invalid-codepoint") + else: + skip = False + self.errors.append("invalid-codepoint") + + def charsUntil(self, characters, opposite=False): + """ Returns a string of characters from the stream up to but not + including any character in 'characters' or EOF. 'characters' must be + a container that supports the 'in' method and iteration over its + characters. 
+ """ + + # Use a cache of regexps to find the required characters + try: + chars = charsUntilRegEx[(characters, opposite)] + except KeyError: + if __debug__: + for c in characters: + assert(ord(c) < 128) + regex = "".join(["\\x%02x" % ord(c) for c in characters]) + if not opposite: + regex = "^%s" % regex + chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) + + rv = [] + + while True: + # Find the longest matching prefix + m = chars.match(self.chunk, self.chunkOffset) + if m is None: + # If nothing matched, and it wasn't because we ran out of chunk, + # then stop + if self.chunkOffset != self.chunkSize: + break + else: + end = m.end() + # If not the whole chunk matched, return everything + # up to the part that didn't match + if end != self.chunkSize: + rv.append(self.chunk[self.chunkOffset:end]) + self.chunkOffset = end + break + # If the whole remainder of the chunk matched, + # use it all and read the next chunk + rv.append(self.chunk[self.chunkOffset:]) + if not self.readChunk(): + # Reached EOF + break + + r = "".join(rv) + return r + + def unget(self, char): + # Only one character is allowed to be ungotten at once - it must + # be consumed again before any further call to unget + if char is not EOF: + if self.chunkOffset == 0: + # unget is called quite rarely, so it's a good idea to do + # more work here if it saves a bit of work in the frequently + # called char and charsUntil. + # So, just prepend the ungotten character onto the current + # chunk: + self.chunk = char + self.chunk + self.chunkSize += 1 + else: + self.chunkOffset -= 1 + assert self.chunk[self.chunkOffset] == char + + +class HTMLBinaryInputStream(HTMLUnicodeInputStream): + """Provides a unicode stream of characters to the HTMLTokenizer. + + This class takes care of character encoding and removing or replacing + incorrect byte-sequences and also provides column and line tracking. + + """ + + def __init__(self, source, override_encoding=None, transport_encoding=None, + same_origin_parent_encoding=None, likely_encoding=None, + default_encoding="windows-1252", useChardet=True): + """Initialises the HTMLInputStream. + + HTMLInputStream(source, [encoding]) -> Normalized stream from source + for use by html5lib. + + source can be either a file-object, local filename or a string. + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + """ + # Raw Stream - for unicode objects this will encode to utf-8 and set + # self.charEncoding as appropriate + self.rawStream = self.openStream(source) + + HTMLUnicodeInputStream.__init__(self, self.rawStream) + + # Encoding Information + # Number of bytes to use when looking for a meta element with + # encoding information + self.numBytesMeta = 1024 + # Number of bytes to use when using detecting encoding using chardet + self.numBytesChardet = 100 + # Things from args + self.override_encoding = override_encoding + self.transport_encoding = transport_encoding + self.same_origin_parent_encoding = same_origin_parent_encoding + self.likely_encoding = likely_encoding + self.default_encoding = default_encoding + + # Determine encoding + self.charEncoding = self.determineEncoding(useChardet) + assert self.charEncoding[0] is not None + + # Call superclass + self.reset() + + def reset(self): + self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') + HTMLUnicodeInputStream.reset(self) + + def openStream(self, source): + """Produces a file object from source. + + source can be either a file object, local filename or a string. + + """ + # Already a file object + if hasattr(source, 'read'): + stream = source + else: + stream = BytesIO(source) + + try: + stream.seek(stream.tell()) + except Exception: + stream = BufferedStream(stream) + + return stream + + def determineEncoding(self, chardet=True): + # BOMs take precedence over everything + # This will also read past the BOM if present + charEncoding = self.detectBOM(), "certain" + if charEncoding[0] is not None: + return charEncoding + + # If we've been overridden, we've been overridden + charEncoding = lookupEncoding(self.override_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Now check the transport layer + charEncoding = lookupEncoding(self.transport_encoding), "certain" + if charEncoding[0] is not None: + return charEncoding + + # Look for meta elements with encoding information + charEncoding = self.detectEncodingMeta(), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Parent document encoding + charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" + if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): + return charEncoding + + # "likely" encoding + charEncoding = lookupEncoding(self.likely_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Guess with chardet, if available + if chardet: + try: + from pip._vendor.chardet.universaldetector import UniversalDetector + except ImportError: + pass + else: + buffers = [] + detector = UniversalDetector() + while not detector.done: + buffer = self.rawStream.read(self.numBytesChardet) + assert isinstance(buffer, bytes) + if not buffer: + break + buffers.append(buffer) + detector.feed(buffer) + detector.close() + encoding = lookupEncoding(detector.result['encoding']) + self.rawStream.seek(0) + if encoding is not None: + return encoding, "tentative" + + # Try the default encoding + charEncoding = lookupEncoding(self.default_encoding), "tentative" + if charEncoding[0] is not None: + return charEncoding + + # Fallback to html5lib's default if even that hasn't worked + return lookupEncoding("windows-1252"), "tentative" + + def changeEncoding(self, newEncoding): + assert self.charEncoding[1] != "certain" + 
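# Only a "tentative" encoding (e.g. one sniffed from a meta element) may be revised; a "certain" one, e.g. from a BOM or the transport layer in determineEncoding above, is final. +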
newEncoding = lookupEncoding(newEncoding) + if newEncoding is None: + return + if newEncoding.name in ("utf-16be", "utf-16le"): + newEncoding = lookupEncoding("utf-8") + assert newEncoding is not None + elif newEncoding == self.charEncoding[0]: + self.charEncoding = (self.charEncoding[0], "certain") + else: + self.rawStream.seek(0) + self.charEncoding = (newEncoding, "certain") + self.reset() + raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) + + def detectBOM(self): + """Attempts to detect at BOM at the start of the stream. If + an encoding can be determined from the BOM return the name of the + encoding otherwise return None""" + bomDict = { + codecs.BOM_UTF8: 'utf-8', + codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', + codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' + } + + # Go to beginning of file and read in 4 bytes + string = self.rawStream.read(4) + assert isinstance(string, bytes) + + # Try detecting the BOM using bytes from the string + encoding = bomDict.get(string[:3]) # UTF-8 + seek = 3 + if not encoding: + # Need to detect UTF-32 before UTF-16 + encoding = bomDict.get(string) # UTF-32 + seek = 4 + if not encoding: + encoding = bomDict.get(string[:2]) # UTF-16 + seek = 2 + + # Set the read position past the BOM if one was found, otherwise + # set it to the start of the stream + if encoding: + self.rawStream.seek(seek) + return lookupEncoding(encoding) + else: + self.rawStream.seek(0) + return None + + def detectEncodingMeta(self): + """Report the encoding declared by the meta element + """ + buffer = self.rawStream.read(self.numBytesMeta) + assert isinstance(buffer, bytes) + parser = EncodingParser(buffer) + self.rawStream.seek(0) + encoding = parser.getEncoding() + + if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): + encoding = lookupEncoding("utf-8") + + return encoding + + +class EncodingBytes(bytes): + """String-like object with an associated position and various extra methods + If the position is ever greater than the string length then an exception is + raised""" + def __new__(self, value): + assert isinstance(value, bytes) + return bytes.__new__(self, value.lower()) + + def __init__(self, value): + # pylint:disable=unused-argument + self._position = -1 + + def __iter__(self): + return self + + def __next__(self): + p = self._position = self._position + 1 + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + return self[p:p + 1] + + def next(self): + # Py2 compat + return self.__next__() + + def previous(self): + p = self._position + if p >= len(self): + raise StopIteration + elif p < 0: + raise TypeError + self._position = p = p - 1 + return self[p:p + 1] + + def setPosition(self, position): + if self._position >= len(self): + raise StopIteration + self._position = position + + def getPosition(self): + if self._position >= len(self): + raise StopIteration + if self._position >= 0: + return self._position + else: + return None + + position = property(getPosition, setPosition) + + def getCurrentByte(self): + return self[self.position:self.position + 1] + + currentByte = property(getCurrentByte) + + def skip(self, chars=spaceCharactersBytes): + """Skip past a list of characters""" + p = self.position # use property for the error-checking + while p < len(self): + c = self[p:p + 1] + if c not in chars: + self._position = p + return c + p += 1 + self._position = p + return None + + def skipUntil(self, chars): + p = self.position + while p < len(self): 
+            c = self[p:p + 1]
+            if c in chars:
+                self._position = p
+                return c
+            p += 1
+        self._position = p
+        return None
+
+    def matchBytes(self, bytes):
+        """Look for a sequence of bytes at the start of a string. If the bytes
+        are found return True and advance the position to the byte after the
+        match. Otherwise return False and leave the position alone"""
+        rv = self.startswith(bytes, self.position)
+        if rv:
+            self.position += len(bytes)
+        return rv
+
+    def jumpTo(self, bytes):
+        """Look for the next sequence of bytes matching a given sequence. If
+        a match is found advance the position to the last byte of the match"""
+        try:
+            self._position = self.index(bytes, self.position) + len(bytes) - 1
+        except ValueError:
+            raise StopIteration
+        return True
+
+
+class EncodingParser(object):
+    """Mini parser for detecting character encoding from meta elements"""
+
+    def __init__(self, data):
+        """string - the data to work on for encoding detection"""
+        self.data = EncodingBytes(data)
+        self.encoding = None
+
+    def getEncoding(self):
+        if b"<meta" not in self.data:
+            return None
+
+        methodDispatch = (
+            (b"<!--", self.handleComment),
+            (b"<meta", self.handleMeta),
+            (b"</", self.handlePossibleEndTag),
+            (b"<!", self.handleOther),
+            (b"<?", self.handleOther),
+            (b"<", self.handlePossibleStartTag))
+        for _ in self.data:
+            keepParsing = True
+            try:
+                self.data.jumpTo(b"<")
+            except StopIteration:
+                break
+            for key, method in methodDispatch:
+                if self.data.matchBytes(key):
+                    try:
+                        keepParsing = method()
+                        break
+                    except StopIteration:
+                        keepParsing = False
+                        break
+            if not keepParsing:
+                break
+
+        return self.encoding
+
+    def handleComment(self):
+        """Skip over comments"""
+        return self.data.jumpTo(b"-->")
+
+    def handleMeta(self):
+        if self.data.currentByte not in spaceCharactersBytes:
+            # if we have <meta not followed by a space so just keep going
+            return True
+        # We have a valid meta element we want to search for attributes
+        hasPragma = False
+        pendingEncoding = None
+        while True:
+            # Try to find the next attribute after the current position
+            attr = self.getAttribute()
+            if attr is None:
+                return True
+            else:
+                if attr[0] == b"http-equiv":
+                    hasPragma = attr[1] == b"content-type"
+                    if hasPragma and pendingEncoding is not None:
+                        self.encoding = pendingEncoding
+                        return False
+                elif attr[0] == b"charset":
+                    tentativeEncoding = attr[1]
+                    codec = lookupEncoding(tentativeEncoding)
+                    if codec is not None:
+                        self.encoding = codec
+                        return False
+                elif attr[0] == b"content":
+                    contentParser = ContentAttrParser(EncodingBytes(attr[1]))
+                    tentativeEncoding = contentParser.parse()
+                    if tentativeEncoding is not None:
+                        codec = lookupEncoding(tentativeEncoding)
+                        if codec is not None:
+                            if hasPragma:
+                                self.encoding = codec
+                                return False
+                            else:
+                                pendingEncoding = codec
+
+    def handlePossibleStartTag(self):
+        return self.handlePossibleTag(False)
+
+    def handlePossibleEndTag(self):
+        next(self.data)
+        return self.handlePossibleTag(True)
+
+    def handlePossibleTag(self, endTag):
+        data = self.data
+        if data.currentByte not in asciiLettersBytes:
+            # If the next byte is not an ascii letter either ignore this
+            # fragment (possible start tag case) or treat it according to
+            # handleOther
+            if endTag:
+                data.previous()
+                self.handleOther()
+            return True
+
+        c = data.skipUntil(spacesAngleBrackets)
+        if c == b"<":
+            # return to the first step in the overall "two step" algorithm
+            # reprocessing the < byte
+            data.previous()
+        else:
+            # Move to the first attribute
+            attr = self.getAttribute()
+            while attr is not None:
+                attr = self.getAttribute()
+        return True
+
+    def handleOther(self):
+        return self.data.jumpTo(b">")
+
+    def getAttribute(self):
+        """Return a name,value pair for the next attribute in the stream,
+        if one is found, or None"""
+        data = self.data
+        # Step 1 (skip chars)
+        c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
+        assert c is None or len(c) == 1
+        # Step 2
+        if c in (b">", None):
+            return None
+        # Step 3
+        attrName = []
+        attrValue = []
+        # Step 4 attribute name
+        while True:
+            if c == b"=" and attrName:
+                break
+            elif c in spaceCharactersBytes:
+                # Step 6!
+                c = data.skip()
+                break
+            elif c in (b"/", b">"):
+                return b"".join(attrName), b""
+            elif c in asciiUppercaseBytes:
+                attrName.append(c.lower())
+            elif c is None:
+                return None
+            else:
+                attrName.append(c)
+            # Step 5
+            c = next(data)
+        # Step 7
+        if c != b"=":
+            data.previous()
+            return b"".join(attrName), b""
+        # Step 8
+        next(data)
+        # Step 9
+        c = data.skip()
+        # Step 10
+        if c in (b"'", b'"'):
+            # 10.1
+            quoteChar = c
+            while True:
+                # 10.2
+                c = next(data)
+                # 10.3
+                if c == quoteChar:
+                    next(data)
+                    return b"".join(attrName), b"".join(attrValue)
+                # 10.4
+                elif c in asciiUppercaseBytes:
+                    attrValue.append(c.lower())
+                # 10.5
+                else:
+                    attrValue.append(c)
+        elif c == b">":
+            return b"".join(attrName), b""
+        elif c in asciiUppercaseBytes:
+            attrValue.append(c.lower())
+        elif c is None:
+            return None
+        else:
+            attrValue.append(c)
+        # Step 11
+        while True:
+            c = next(data)
+            if c in spacesAngleBrackets:
+                return b"".join(attrName), b"".join(attrValue)
+            elif c in asciiUppercaseBytes:
+                attrValue.append(c.lower())
+            elif c is None:
+                return None
+            else:
+                attrValue.append(c)
+
+
+class ContentAttrParser(object):
+    def __init__(self, data):
+        assert isinstance(data, bytes)
+        self.data = data
+
+    def parse(self):
+        try:
+            # Check if the attr name is charset
+            # otherwise return
+            self.data.jumpTo(b"charset")
+            self.data.position += 1
+            self.data.skip()
+            if not self.data.currentByte == b"=":
+                # If there is no = sign keep looking for attrs
+                return None
+            self.data.position += 1
+            self.data.skip()
+            # Look for an encoding between matching quote marks
+            if self.data.currentByte in (b'"', b"'"):
+                quoteMark = self.data.currentByte
+                self.data.position += 1
+                oldPosition = self.data.position
+                if self.data.jumpTo(quoteMark):
+                    return self.data[oldPosition:self.data.position]
+                else:
+                    return None
+            else:
+                # Unquoted value
+                oldPosition = self.data.position
+                try: +
self.data.skipUntil(spaceCharactersBytes) + return self.data[oldPosition:self.data.position] + except StopIteration: + # Return the whole remaining value + return self.data[oldPosition:] + except StopIteration: + return None + + +def lookupEncoding(encoding): + """Return the python codec name corresponding to an encoding or None if the + string doesn't correspond to a valid encoding.""" + if isinstance(encoding, bytes): + try: + encoding = encoding.decode("ascii") + except UnicodeDecodeError: + return None + + if encoding is not None: + try: + return webencodings.lookup(encoding) + except AttributeError: + return None + else: + return None diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_tokenizer.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_tokenizer.py new file mode 100644 index 000000000..5f00253e2 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_tokenizer.py @@ -0,0 +1,1735 @@ +from __future__ import absolute_import, division, unicode_literals + +from pip._vendor.six import unichr as chr + +from collections import deque, OrderedDict +from sys import version_info + +from .constants import spaceCharacters +from .constants import entities +from .constants import asciiLetters, asciiUpper2Lower +from .constants import digits, hexDigits, EOF +from .constants import tokenTypes, tagTokenTypes +from .constants import replacementCharacters + +from ._inputstream import HTMLInputStream + +from ._trie import Trie + +entitiesTrie = Trie(entities) + +if version_info >= (3, 7): + attributeMap = dict +else: + attributeMap = OrderedDict + + +class HTMLTokenizer(object): + """ This class takes care of tokenizing HTML. + + * self.currentToken + Holds the token that is currently being processed. + + * self.state + Holds a reference to the method to be invoked... XXX + + * self.stream + Points to HTMLInputStream object. + """ + + def __init__(self, stream, parser=None, **kwargs): + + self.stream = HTMLInputStream(stream, **kwargs) + self.parser = parser + + # Setup the initial tokenizer state + self.escapeFlag = False + self.lastFourChars = [] + self.state = self.dataState + self.escape = False + + # The current token being created + self.currentToken = None + super(HTMLTokenizer, self).__init__() + + def __iter__(self): + """ This is where the magic happens. + + We do our usually processing through the states and when we have a token + to return we yield the token which pauses processing until the next token + is requested. + """ + self.tokenQueue = deque([]) + # Start processing. When EOF is reached self.state will return False + # instead of True and the loop will terminate. + while self.state(): + while self.stream.errors: + yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} + while self.tokenQueue: + yield self.tokenQueue.popleft() + + def consumeNumberEntity(self, isHex): + """This function returns either U+FFFD or the character based on the + decimal or hexadecimal representation. It also discards ";" if present. + If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. + """ + + allowed = digits + radix = 10 + if isHex: + allowed = hexDigits + radix = 16 + + charStack = [] + + # Consume all the characters that are in range while making sure we + # don't hit an EOF. + c = self.stream.char() + while c in allowed and c is not EOF: + charStack.append(c) + c = self.stream.char() + + # Convert the set of characters consumed to an int. 
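+        # Worked example: for the input "&#x41;" the loop above collects
+        # charStack == ["4", "1"], so int("41", 16) == 65 and the entity
+        # decodes to "A"; "&#65;" takes the radix-10 path to the same result.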
+        charAsInt = int("".join(charStack), radix)
+
+        # Certain characters get replaced with others
+        if charAsInt in replacementCharacters:
+            char = replacementCharacters[charAsInt]
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "illegal-codepoint-for-numeric-entity",
+                                    "datavars": {"charAsInt": charAsInt}})
+        elif ((0xD800 <= charAsInt <= 0xDFFF) or
+              (charAsInt > 0x10FFFF)):
+            char = "\uFFFD"
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "illegal-codepoint-for-numeric-entity",
+                                    "datavars": {"charAsInt": charAsInt}})
+        else:
+            # Should speed up this check somehow (e.g. move the set to a constant)
+            if ((0x0001 <= charAsInt <= 0x0008) or
+                (0x000E <= charAsInt <= 0x001F) or
+                (0x007F <= charAsInt <= 0x009F) or
+                (0xFDD0 <= charAsInt <= 0xFDEF) or
+                charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,
+                                        0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
+                                        0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,
+                                        0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,
+                                        0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,
+                                        0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,
+                                        0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
+                                        0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,
+                                        0xFFFFF, 0x10FFFE, 0x10FFFF])):
+                self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                        "data":
+                                        "illegal-codepoint-for-numeric-entity",
+                                        "datavars": {"charAsInt": charAsInt}})
+            try:
+                # Try/except needed as UCS-2 Python builds' unichar only works
+                # within the BMP.
+                char = chr(charAsInt)
+            except ValueError:
+                v = charAsInt - 0x10000
+                char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))
+
+        # Discard the ; if present. Otherwise, put it back on the queue and
+        # invoke parseError on parser.
+        if c != ";":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "numeric-entity-without-semicolon"})
+            self.stream.unget(c)
+
+        return char
+
+    def consumeEntity(self, allowedChar=None, fromAttribute=False):
+        # Initialise to the default output for when no entity is matched
+        output = "&"
+
+        charStack = [self.stream.char()]
+        if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or
+                (allowedChar is not None and allowedChar == charStack[0])):
+            self.stream.unget(charStack[0])
+
+        elif charStack[0] == "#":
+            # Read the next character to see if it's hex or decimal
+            hex = False
+            charStack.append(self.stream.char())
+            if charStack[-1] in ("x", "X"):
+                hex = True
+                charStack.append(self.stream.char())
+
+            # charStack[-1] should be the first digit
+            if (hex and charStack[-1] in hexDigits) \
+                    or (not hex and charStack[-1] in digits):
+                # At least one digit found, so consume the whole number
+                self.stream.unget(charStack[-1])
+                output = self.consumeNumberEntity(hex)
+            else:
+                # No digits found
+                self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                        "data": "expected-numeric-entity"})
+                self.stream.unget(charStack.pop())
+                output = "&" + "".join(charStack)
+
+        else:
+            # At this point in the process might have named entity. Entities
+            # are stored in the global variable "entities".
+            #
+            # Consume characters and compare to these to a substring of the
+            # entity names in the list until the substring no longer matches.
+            while (charStack[-1] is not EOF):
+                if not entitiesTrie.has_keys_with_prefix("".join(charStack)):
+                    break
+                charStack.append(self.stream.char())
+
+            # At this point we have a string that starts with some characters
+            # that may match an entity
+            # Try to find the longest entity the string will match to take care
+            # of &noti for instance.
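+            # For example, given "&noti" at the end of the stream, charStack
+            # collects "n", "o", "t", "i" and then EOF; the longest matching
+            # entity name is "not", so the output is "\u00AC" + "i" and a
+            # named-entity-without-semicolon parse error is queued below.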
+ try: + entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) + entityLength = len(entityName) + except KeyError: + entityName = None + + if entityName is not None: + if entityName[-1] != ";": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "named-entity-without-semicolon"}) + if (entityName[-1] != ";" and fromAttribute and + (charStack[entityLength] in asciiLetters or + charStack[entityLength] in digits or + charStack[entityLength] == "=")): + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + else: + output = entities[entityName] + self.stream.unget(charStack.pop()) + output += "".join(charStack[entityLength:]) + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-named-entity"}) + self.stream.unget(charStack.pop()) + output = "&" + "".join(charStack) + + if fromAttribute: + self.currentToken["data"][-1][1] += output + else: + if output in spaceCharacters: + tokenType = "SpaceCharacters" + else: + tokenType = "Characters" + self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) + + def processEntityInAttribute(self, allowedChar): + """This method replaces the need for "entityInAttributeValueState". + """ + self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) + + def emitCurrentToken(self): + """This method is a generic handler for emitting the tags. It also sets + the state to "data" because that's what's needed after a token has been + emitted. + """ + token = self.currentToken + # Add token to the queue to be yielded + if (token["type"] in tagTokenTypes): + token["name"] = token["name"].translate(asciiUpper2Lower) + if token["type"] == tokenTypes["StartTag"]: + raw = token["data"] + data = attributeMap(raw) + if len(raw) > len(data): + # we had some duplicated attribute, fix so first wins + data.update(raw[::-1]) + token["data"] = data + + if token["type"] == tokenTypes["EndTag"]: + if token["data"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "attributes-in-end-tag"}) + if token["selfClosing"]: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "self-closing-flag-on-end-tag"}) + self.tokenQueue.append(token) + self.state = self.dataState + + # Below are the various tokenizer states worked out. + def dataState(self): + data = self.stream.char() + if data == "&": + self.state = self.entityDataState + elif data == "<": + self.state = self.tagOpenState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\u0000"}) + elif data is EOF: + # Tokenization ends. + return False + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. 
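+            # Note: with opposite=True, charsUntil matches characters that
+            # *are* in the given set, so this single call consumes the rest
+            # of the current whitespace run.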
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def entityDataState(self): + self.consumeEntity() + self.state = self.dataState + return True + + def rcdataState(self): + data = self.stream.char() + if data == "&": + self.state = self.characterReferenceInRcdata + elif data == "<": + self.state = self.rcdataLessThanSignState + elif data == EOF: + # Tokenization ends. + return False + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data in spaceCharacters: + # Directly after emitting a token you switch back to the "data + # state". At that point spaceCharacters are important so they are + # emitted separately. + self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": + data + self.stream.charsUntil(spaceCharacters, True)}) + # No need to update lastFourChars here, since the first space will + # have already been appended to lastFourChars and will have broken + # any sequences + else: + chars = self.stream.charsUntil(("&", "<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def characterReferenceInRcdata(self): + self.consumeEntity() + self.state = self.rcdataState + return True + + def rawtextState(self): + data = self.stream.char() + if data == "<": + self.state = self.rawtextLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def scriptDataState(self): + data = self.stream.char() + if data == "<": + self.state = self.scriptDataLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + elif data == EOF: + # Tokenization ends. + return False + else: + chars = self.stream.charsUntil(("<", "\u0000")) + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": + data + chars}) + return True + + def plaintextState(self): + data = self.stream.char() + if data == EOF: + # Tokenization ends. 
+            return False
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "\uFFFD"})
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+                                    data + self.stream.charsUntil("\u0000")})
+        return True
+
+    def tagOpenState(self):
+        data = self.stream.char()
+        if data == "!":
+            self.state = self.markupDeclarationOpenState
+        elif data == "/":
+            self.state = self.closeTagOpenState
+        elif data in asciiLetters:
+            self.currentToken = {"type": tokenTypes["StartTag"],
+                                 "name": data, "data": [],
+                                 "selfClosing": False,
+                                 "selfClosingAcknowledged": False}
+            self.state = self.tagNameState
+        elif data == ">":
+            # XXX In theory it could be something besides a tag name. But
+            # do we really care?
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-tag-name-but-got-right-bracket"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})
+            self.state = self.dataState
+        elif data == "?":
+            # XXX In theory it could be something besides a tag name. But
+            # do we really care?
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-tag-name-but-got-question-mark"})
+            self.stream.unget(data)
+            self.state = self.bogusCommentState
+        else:
+            # XXX
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-tag-name"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.stream.unget(data)
+            self.state = self.dataState
+        return True
+
+    def closeTagOpenState(self):
+        data = self.stream.char()
+        if data in asciiLetters:
+            self.currentToken = {"type": tokenTypes["EndTag"], "name": data,
+                                 "data": [], "selfClosing": False}
+            self.state = self.tagNameState
+        elif data == ">":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-closing-tag-but-got-right-bracket"})
+            self.state = self.dataState
+        elif data is EOF:
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-closing-tag-but-got-eof"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+            self.state = self.dataState
+        else:
+            # XXX data can be _'_...
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "expected-closing-tag-but-got-char",
+                                    "datavars": {"data": data}})
+            self.stream.unget(data)
+            self.state = self.bogusCommentState
+        return True
+
+    def tagNameState(self):
+        data = self.stream.char()
+        if data in spaceCharacters:
+            self.state = self.beforeAttributeNameState
+        elif data == ">":
+            self.emitCurrentToken()
+        elif data is EOF:
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+                                    "eof-in-tag-name"})
+            self.state = self.dataState
+        elif data == "/":
+            self.state = self.selfClosingStartTagState
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.currentToken["name"] += "\uFFFD"
+        else:
+            self.currentToken["name"] += data
+            # (Don't use charsUntil here, because tag names are
+            # very short and it's faster to not do anything fancy)
+        return True
+
+    def rcdataLessThanSignState(self):
+        data = self.stream.char()
+        if data == "/":
+            self.temporaryBuffer = ""
+            self.state = self.rcdataEndTagOpenState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.stream.unget(data)
+            self.state = self.rcdataState
+        return True
+
+    def rcdataEndTagOpenState(self):
+        data = self.stream.char()
+        if data in asciiLetters:
+            self.temporaryBuffer += data
+            self.state = self.rcdataEndTagNameState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+            self.stream.unget(data)
+            self.state = self.rcdataState
+        return True
+
+    def rcdataEndTagNameState(self):
+        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+        data = self.stream.char()
+        if data in spaceCharacters and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.beforeAttributeNameState
+        elif data == "/" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.selfClosingStartTagState
+        elif data == ">" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.emitCurrentToken()
+            self.state = self.dataState
+        elif data in asciiLetters:
+            self.temporaryBuffer += data
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "</" + self.temporaryBuffer})
+            self.stream.unget(data)
+            self.state = self.rcdataState
+        return True
+
+    def rawtextLessThanSignState(self):
+        data = self.stream.char()
+        if data == "/":
+            self.temporaryBuffer = ""
+            self.state = self.rawtextEndTagOpenState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.stream.unget(data)
+            self.state = self.rawtextState
+        return True
+
+    def rawtextEndTagOpenState(self):
+        data = self.stream.char()
+        if data in asciiLetters:
+            self.temporaryBuffer += data
+            self.state = self.rawtextEndTagNameState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+            self.stream.unget(data)
+            self.state = self.rawtextState
+        return True
+
+    def rawtextEndTagNameState(self):
+        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+        data = self.stream.char()
+        if data in spaceCharacters and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.beforeAttributeNameState
+        elif data == "/" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.selfClosingStartTagState
+        elif data == ">" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.emitCurrentToken()
+            self.state = self.dataState
+        elif data in asciiLetters:
+            self.temporaryBuffer += data
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "</" + self.temporaryBuffer})
+            self.stream.unget(data)
+            self.state = self.rawtextState
+        return True
+
+    def scriptDataLessThanSignState(self):
+        data = self.stream.char()
+        if data == "/":
+            self.temporaryBuffer = ""
+            self.state = self.scriptDataEndTagOpenState
+        elif data == "!":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})
+            self.state = self.scriptDataEscapeStartState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.stream.unget(data)
+            self.state = self.scriptDataState
+        return True
+
+    def scriptDataEndTagOpenState(self):
+        data = self.stream.char()
+        if data in asciiLetters:
+            self.temporaryBuffer += data
+            self.state = self.scriptDataEndTagNameState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+            self.stream.unget(data)
+            self.state = self.scriptDataState
+        return True
+
+    def scriptDataEndTagNameState(self):
+        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+        data = self.stream.char()
+        if data in spaceCharacters and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.beforeAttributeNameState
+        elif data == "/" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.selfClosingStartTagState
+        elif data == ">" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.emitCurrentToken()
+            self.state = self.dataState
+        elif data in asciiLetters:
+            self.temporaryBuffer += data
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "</" + self.temporaryBuffer})
+            self.stream.unget(data)
+            self.state = self.scriptDataState
+        return True
+
+    def scriptDataEscapeStartState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+            self.state = self.scriptDataEscapeStartDashState
+        else:
+            self.stream.unget(data)
+            self.state = self.scriptDataState
+        return True
+
+    def scriptDataEscapeStartDashState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+            self.state = self.scriptDataEscapedDashDashState
+        else:
+            self.stream.unget(data)
+            self.state = self.scriptDataState
+        return True
+
+    def scriptDataEscapedState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+            self.state = self.scriptDataEscapedDashState
+        elif data == "<":
+            self.state = self.scriptDataEscapedLessThanSignState
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "\uFFFD"})
+        elif data == EOF:
+            self.state = self.dataState
+        else:
+            chars = self.stream.charsUntil(("<", "-", "\u0000"))
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+                                    data + chars})
+        return True
+
+    def scriptDataEscapedDashState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+            self.state = self.scriptDataEscapedDashDashState
+        elif data == "<":
+            self.state = self.scriptDataEscapedLessThanSignState
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "\uFFFD"})
+            self.state = self.scriptDataEscapedState
+        elif data == EOF:
+            self.state = self.dataState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataEscapedDashDashState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+        elif data == "<":
+            self.state = self.scriptDataEscapedLessThanSignState
+        elif data == ">":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
+            self.state = self.scriptDataState
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "\uFFFD"})
+            self.state = self.scriptDataEscapedState
+        elif data == EOF:
+            self.state = self.dataState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataEscapedLessThanSignState(self):
+        data = self.stream.char()
+        if data == "/":
+            self.temporaryBuffer = ""
+            self.state = self.scriptDataEscapedEndTagOpenState
+        elif data in asciiLetters:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})
+            self.temporaryBuffer = data
+            self.state = self.scriptDataDoubleEscapeStartState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.stream.unget(data)
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataEscapedEndTagOpenState(self):
+        data = self.stream.char()
+        if data in asciiLetters:
+            self.temporaryBuffer = data
+            self.state = self.scriptDataEscapedEndTagNameState
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+            self.stream.unget(data)
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataEscapedEndTagNameState(self):
+        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+        data = self.stream.char()
+        if data in spaceCharacters and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.beforeAttributeNameState
+        elif data == "/" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.state = self.selfClosingStartTagState
+        elif data == ">" and appropriate:
+            self.currentToken = {"type": tokenTypes["EndTag"],
+                                 "name": self.temporaryBuffer,
+                                 "data": [], "selfClosing": False}
+            self.emitCurrentToken()
+            self.state = self.dataState
+        elif data in asciiLetters:
+            self.temporaryBuffer += data
+        else:
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "</" + self.temporaryBuffer})
+            self.stream.unget(data)
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataDoubleEscapeStartState(self):
+        data = self.stream.char()
+        if data in (spaceCharacters | frozenset(("/", ">"))):
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+            if self.temporaryBuffer.lower() == "script":
+                self.state = self.scriptDataDoubleEscapedState
+            else:
+                self.state = self.scriptDataEscapedState
+        elif data in asciiLetters:
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+            self.temporaryBuffer += data
+        else:
+            self.stream.unget(data)
+            self.state = self.scriptDataEscapedState
+        return True
+
+    def scriptDataDoubleEscapedState(self):
+        data = self.stream.char()
+        if data == "-":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+            self.state = self.scriptDataDoubleEscapedDashState
+        elif data == "<":
+            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+            self.state = self.scriptDataDoubleEscapedLessThanSignState
+        elif data == "\u0000":
+            self.tokenQueue.append({"type": tokenTypes["ParseError"],
+                                    "data": "invalid-codepoint"})
+            self.tokenQueue.append({"type": tokenTypes["Characters"],
+                                    "data": "\uFFFD"})
+        elif data == EOF:
+            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": +
"eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + return True + + def scriptDataDoubleEscapedDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + self.state = self.scriptDataDoubleEscapedDashDashState + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedDashDashState(self): + data = self.stream.char() + if data == "-": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) + elif data == "<": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) + self.state = self.scriptDataDoubleEscapedLessThanSignState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) + self.state = self.scriptDataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": "\uFFFD"}) + self.state = self.scriptDataDoubleEscapedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-script-in-script"}) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapedLessThanSignState(self): + data = self.stream.char() + if data == "/": + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) + self.temporaryBuffer = "" + self.state = self.scriptDataDoubleEscapeEndState + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def scriptDataDoubleEscapeEndState(self): + data = self.stream.char() + if data in (spaceCharacters | frozenset(("/", ">"))): + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + if self.temporaryBuffer.lower() == "script": + self.state = self.scriptDataEscapedState + else: + self.state = self.scriptDataDoubleEscapedState + elif data in asciiLetters: + self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) + self.temporaryBuffer += data + else: + self.stream.unget(data) + self.state = self.scriptDataDoubleEscapedState + return True + + def beforeAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data in ("'", '"', "=", "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-in-attribute-name"}) + 
self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-name-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def attributeNameState(self): + data = self.stream.char() + leavingThisState = True + emitToken = False + if data == "=": + self.state = self.beforeAttributeValueState + elif data in asciiLetters: + self.currentToken["data"][-1][0] += data +\ + self.stream.charsUntil(asciiLetters, True) + leavingThisState = False + elif data == ">": + # XXX If we emit here the attributes are converted to a dict + # without being checked and when the code below runs we error + # because data is a dict not a list + emitToken = True + elif data in spaceCharacters: + self.state = self.afterAttributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][0] += "\uFFFD" + leavingThisState = False + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "invalid-character-in-attribute-name"}) + self.currentToken["data"][-1][0] += data + leavingThisState = False + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-attribute-name"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][0] += data + leavingThisState = False + + if leavingThisState: + # Attributes are not dropped at this stage. That happens when the + # start tag token is emitted so values can still be safely appended + # to attributes, but we do want to report the parse error in time. 
+ self.currentToken["data"][-1][0] = ( + self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) + for name, _ in self.currentToken["data"][:-1]: + if self.currentToken["data"][-1][0] == name: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "duplicate-attribute"}) + break + # XXX Fix for above XXX + if emitToken: + self.emitCurrentToken() + return True + + def afterAttributeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "=": + self.state = self.beforeAttributeValueState + elif data == ">": + self.emitCurrentToken() + elif data in asciiLetters: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data == "/": + self.state = self.selfClosingStartTagState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"].append(["\uFFFD", ""]) + self.state = self.attributeNameState + elif data in ("'", '"', "<"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "invalid-character-after-attribute-name"}) + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-end-of-tag-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"].append([data, ""]) + self.state = self.attributeNameState + return True + + def beforeAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.stream.charsUntil(spaceCharacters, True) + elif data == "\"": + self.state = self.attributeValueDoubleQuotedState + elif data == "&": + self.state = self.attributeValueUnQuotedState + self.stream.unget(data) + elif data == "'": + self.state = self.attributeValueSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-right-bracket"}) + self.emitCurrentToken() + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + self.state = self.attributeValueUnQuotedState + elif data in ("=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "equals-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-attribute-value-but-got-eof"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.state = self.attributeValueUnQuotedState + return True + + def attributeValueDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute('"') + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-double-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("\"", "&", "\u0000")) + return True + + def attributeValueSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = 
self.afterAttributeValueState + elif data == "&": + self.processEntityInAttribute("'") + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-single-quote"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data +\ + self.stream.charsUntil(("'", "&", "\u0000")) + return True + + def attributeValueUnQuotedState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == "&": + self.processEntityInAttribute(">") + elif data == ">": + self.emitCurrentToken() + elif data in ('"', "'", "=", "<", "`"): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-in-unquoted-attribute-value"}) + self.currentToken["data"][-1][1] += data + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"][-1][1] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-attribute-value-no-quotes"}) + self.state = self.dataState + else: + self.currentToken["data"][-1][1] += data + self.stream.charsUntil( + frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) + return True + + def afterAttributeValueState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeAttributeNameState + elif data == ">": + self.emitCurrentToken() + elif data == "/": + self.state = self.selfClosingStartTagState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-EOF-after-attribute-value"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-attribute-value"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def selfClosingStartTagState(self): + data = self.stream.char() + if data == ">": + self.currentToken["selfClosing"] = True + self.emitCurrentToken() + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": + "unexpected-EOF-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-character-after-solidus-in-tag"}) + self.stream.unget(data) + self.state = self.beforeAttributeNameState + return True + + def bogusCommentState(self): + # Make a new comment token and give it as value all the characters + # until the first > or EOF (charsUntil checks for EOF automatically) + # and emit it. + data = self.stream.charsUntil(">") + data = data.replace("\u0000", "\uFFFD") + self.tokenQueue.append( + {"type": tokenTypes["Comment"], "data": data}) + + # Eat the character directly after the bogus comment which is either a + # ">" or an EOF. 
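+        # For example, "<?php echo 1 ?>" enters this state via tagOpenState's
+        # "?" branch and is emitted as a Comment token with data
+        # "?php echo 1 ?".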
+ self.stream.char() + self.state = self.dataState + return True + + def markupDeclarationOpenState(self): + charStack = [self.stream.char()] + if charStack[-1] == "-": + charStack.append(self.stream.char()) + if charStack[-1] == "-": + self.currentToken = {"type": tokenTypes["Comment"], "data": ""} + self.state = self.commentStartState + return True + elif charStack[-1] in ('d', 'D'): + matched = True + for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), + ('y', 'Y'), ('p', 'P'), ('e', 'E')): + charStack.append(self.stream.char()) + if charStack[-1] not in expected: + matched = False + break + if matched: + self.currentToken = {"type": tokenTypes["Doctype"], + "name": "", + "publicId": None, "systemId": None, + "correct": True} + self.state = self.doctypeState + return True + elif (charStack[-1] == "[" and + self.parser is not None and + self.parser.tree.openElements and + self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): + matched = True + for expected in ["C", "D", "A", "T", "A", "["]: + charStack.append(self.stream.char()) + if charStack[-1] != expected: + matched = False + break + if matched: + self.state = self.cdataSectionState + return True + + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-dashes-or-doctype"}) + + while charStack: + self.stream.unget(charStack.pop()) + self.state = self.bogusCommentState + return True + + def commentStartState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentStartDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + self.state = self.commentState + return True + + def commentStartDashState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "incorrect-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentState(self): + data = self.stream.char() + if data == "-": + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "\uFFFD" + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "eof-in-comment"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += data + \ + self.stream.charsUntil(("-", "\u0000")) + return True + + def commentEndDashState(self): + data = self.stream.char() + if data == "-": + self.state = 
self.commentEndState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "-\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "-" + data + self.state = self.commentState + return True + + def commentEndState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--\uFFFD" + self.state = self.commentState + elif data == "!": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-bang-after-double-dash-in-comment"}) + self.state = self.commentEndBangState + elif data == "-": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-dash-after-double-dash-in-comment"}) + self.currentToken["data"] += data + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-double-dash"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + # XXX + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-comment"}) + self.currentToken["data"] += "--" + data + self.state = self.commentState + return True + + def commentEndBangState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "-": + self.currentToken["data"] += "--!" + self.state = self.commentEndDashState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["data"] += "--!\uFFFD" + self.state = self.commentState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-comment-end-bang-state"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["data"] += "--!" 
+ data + self.state = self.commentState + return True + + def doctypeState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "need-space-after-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeNameState + return True + + def beforeDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-right-bracket"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] = "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-doctype-name-but-got-eof"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] = data + self.state = self.doctypeNameState + return True + + def doctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.state = self.afterDoctypeNameState + elif data == ">": + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["name"] += "\uFFFD" + self.state = self.doctypeNameState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype-name"}) + self.currentToken["correct"] = False + self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["name"] += data + return True + + def afterDoctypeNameState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.currentToken["correct"] = False + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + if data in ("p", "P"): + matched = True + for expected in (("u", "U"), ("b", "B"), ("l", "L"), + ("i", "I"), ("c", "C")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypePublicKeywordState + return True + elif data in ("s", "S"): + matched = True + for expected in (("y", "Y"), ("s", "S"), ("t", "T"), + ("e", "E"), ("m", "M")): + data = self.stream.char() + if data not in expected: + matched = False + break + if matched: + self.state = self.afterDoctypeSystemKeywordState + return True + + # All the characters read before the current 'data' will be + # [a-zA-Z], 
so they're garbage in the bogus doctype and can be + # discarded; only the latest character might be '>' or EOF + # and needs to be ungetted + self.stream.unget(data) + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "expected-space-or-right-bracket-in-doctype", "datavars": + {"data": data}}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + + return True + + def afterDoctypePublicKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypePublicIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypePublicIdentifierState + return True + + def beforeDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["publicId"] = "" + self.state = self.doctypePublicIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypePublicIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def doctypePublicIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypePublicIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["publicId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["publicId"] += data + return True + + def afterDoctypePublicIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.betweenDoctypePublicAndSystemIdentifiersState + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def betweenDoctypePublicAndSystemIdentifiersState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data == '"': + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def afterDoctypeSystemKeywordState(self): + data = self.stream.char() + if data in spaceCharacters: + self.state = self.beforeDoctypeSystemIdentifierState + elif data in ("'", '"'): + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.stream.unget(data) + self.state = self.beforeDoctypeSystemIdentifierState + return True + + def beforeDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == "\"": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierDoubleQuotedState + elif data == "'": + self.currentToken["systemId"] = "" + self.state = self.doctypeSystemIdentifierSingleQuotedState + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.currentToken["correct"] = False + self.state = self.bogusDoctypeState + return True + + def doctypeSystemIdentifierDoubleQuotedState(self): + data = self.stream.char() + if data == "\"": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def doctypeSystemIdentifierSingleQuotedState(self): + data = self.stream.char() + if data == "'": + self.state = self.afterDoctypeSystemIdentifierState + elif data == "\u0000": + self.tokenQueue.append({"type": tokenTypes["ParseError"], + "data": "invalid-codepoint"}) + self.currentToken["systemId"] += "\uFFFD" + elif data == ">": + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-end-of-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.currentToken["systemId"] += data + return True + + def afterDoctypeSystemIdentifierState(self): + data = self.stream.char() + if data in spaceCharacters: + pass + elif data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "eof-in-doctype"}) + self.currentToken["correct"] = False + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": + "unexpected-char-in-doctype"}) + self.state = self.bogusDoctypeState + return True + + def bogusDoctypeState(self): + data = self.stream.char() + if data == ">": + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + elif data is EOF: + # XXX EMIT + self.stream.unget(data) + self.tokenQueue.append(self.currentToken) + self.state = self.dataState + else: + pass + return True + + def cdataSectionState(self): + data = [] + while True: + data.append(self.stream.charsUntil("]")) + data.append(self.stream.charsUntil(">")) + char = self.stream.char() + if char == EOF: + break + else: + assert char == ">" + if data[-1][-2:] == "]]": + data[-1] = data[-1][:-2] + break + else: + data.append(char) + + data = "".join(data) # pylint:disable=redefined-variable-type + # Deal with null here rather than in the parser + nullCount = data.count("\u0000") + if nullCount > 0: + for _ in range(nullCount): + self.tokenQueue.append({"type": tokenTypes["ParseError"], 
+ "data": "invalid-codepoint"}) + data = data.replace("\u0000", "\uFFFD") + if data: + self.tokenQueue.append({"type": tokenTypes["Characters"], + "data": data}) + self.state = self.dataState + return True diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__init__.py new file mode 100644 index 000000000..07bad5d31 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import, division, unicode_literals + +from .py import Trie + +__all__ = ["Trie"] diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..60ff777b3 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-310.pyc new file mode 100644 index 000000000..4c3d03e42 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-310.pyc new file mode 100644 index 000000000..5903bfcae Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/_base.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/_base.py new file mode 100644 index 000000000..6b71975f0 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/_base.py @@ -0,0 +1,40 @@ +from __future__ import absolute_import, division, unicode_literals + +try: + from collections.abc import Mapping +except ImportError: # Python 2.7 + from collections import Mapping + + +class Trie(Mapping): + """Abstract base class for tries""" + + def keys(self, prefix=None): + # pylint:disable=arguments-differ + keys = super(Trie, self).keys() + + if prefix is None: + return set(keys) + + return {x for x in keys if x.startswith(prefix)} + + def has_keys_with_prefix(self, prefix): + for key in self.keys(): + if key.startswith(prefix): + return True + + return False + + def longest_prefix(self, prefix): + if prefix in self: + return prefix + + for i in range(1, len(prefix) + 1): + if prefix[:-i] in self: + return prefix[:-i] + + raise KeyError(prefix) + + def longest_prefix_item(self, prefix): + lprefix = self.longest_prefix(prefix) + return (lprefix, self[lprefix]) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/py.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/py.py new file mode 100644 index 000000000..c178b219d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_trie/py.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import text_type + +from bisect import bisect_left + +from ._base import Trie as ABCTrie + + +class Trie(ABCTrie): + def __init__(self, data): + if not 
all(isinstance(x, text_type) for x in data.keys()):
+ raise TypeError("All keys must be strings")
+
+ self._data = data
+ self._keys = sorted(data.keys())
+ self._cachestr = ""
+ self._cachepoints = (0, len(data))
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __len__(self):
+ return len(self._data)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def keys(self, prefix=None):
+ if prefix is None or prefix == "" or not self._keys:
+ return set(self._keys)
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ start = i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ start = i = bisect_left(self._keys, prefix)
+
+ keys = set()
+ if start == len(self._keys):
+ return keys
+
+ # Stop before running off the end of the key list: without the
+ # bounds check, self._keys[i] raises IndexError whenever every
+ # remaining key matches the prefix.
+ while i < len(self._keys) and self._keys[i].startswith(prefix):
+ keys.add(self._keys[i])
+ i += 1
+
+ self._cachestr = prefix
+ self._cachepoints = (start, i)
+
+ return keys
+
+ def has_keys_with_prefix(self, prefix):
+ if prefix in self._data:
+ return True
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ i = bisect_left(self._keys, prefix)
+
+ if i == len(self._keys):
+ return False
+
+ return self._keys[i].startswith(prefix)
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_utils.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_utils.py
new file mode 100644
index 000000000..d7c4926af
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/_utils.py
@@ -0,0 +1,159 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from types import ModuleType
+
+try:
+ from collections.abc import Mapping
+except ImportError:
+ from collections import Mapping
+
+from pip._vendor.six import text_type, PY3
+
+if PY3:
+ import xml.etree.ElementTree as default_etree
+else:
+ try:
+ import xml.etree.cElementTree as default_etree
+ except ImportError:
+ import xml.etree.ElementTree as default_etree
+
+
+__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
+ "surrogatePairToCodepoint", "moduleFactoryFactory",
+ "supports_lone_surrogates"]
+
+
+# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
+# caught by the below test. In general this would be any platform
+# using UTF-16 as its encoding of unicode strings, such as
+# Jython. This is because UTF-16 itself is based on the use of such
+# surrogates, and there is no mechanism to further escape such
+# escapes.
+try:
+ _x = eval('"\\uD800"') # pylint:disable=eval-used
+ if not isinstance(_x, text_type):
+ # We need this with u"" because of http://bugs.jython.org/issue2039
+ _x = eval('u"\\uD800"') # pylint:disable=eval-used
+ assert isinstance(_x, text_type)
+except Exception:
+ supports_lone_surrogates = False
+else:
+ supports_lone_surrogates = True
+
+
+class MethodDispatcher(dict):
+ """Dict with 2 special properties:
+
+ On initiation, keys that are lists, sets or tuples are converted to
+ multiple keys so accessing any one of the items in the original
+ list-like object returns the matching value
+
+ md = MethodDispatcher({("foo", "bar"):"baz"})
+ md["foo"] == "baz"
+
+ A default value which can be set through the default attribute.
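+
+ For example (editor's illustration, not in the upstream docstring):
+
+ md.default = "qux"
+ md["unhandled"] == "qux"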
+ """ + + def __init__(self, items=()): + _dictEntries = [] + for name, value in items: + if isinstance(name, (list, tuple, frozenset, set)): + for item in name: + _dictEntries.append((item, value)) + else: + _dictEntries.append((name, value)) + dict.__init__(self, _dictEntries) + assert len(self) == len(_dictEntries) + self.default = None + + def __getitem__(self, key): + return dict.get(self, key, self.default) + + def __get__(self, instance, owner=None): + return BoundMethodDispatcher(instance, self) + + +class BoundMethodDispatcher(Mapping): + """Wraps a MethodDispatcher, binding its return values to `instance`""" + def __init__(self, instance, dispatcher): + self.instance = instance + self.dispatcher = dispatcher + + def __getitem__(self, key): + # see https://docs.python.org/3/reference/datamodel.html#object.__get__ + # on a function, __get__ is used to bind a function to an instance as a bound method + return self.dispatcher[key].__get__(self.instance) + + def get(self, key, default): + if key in self.dispatcher: + return self[key] + else: + return default + + def __iter__(self): + return iter(self.dispatcher) + + def __len__(self): + return len(self.dispatcher) + + def __contains__(self, key): + return key in self.dispatcher + + +# Some utility functions to deal with weirdness around UCS2 vs UCS4 +# python builds + +def isSurrogatePair(data): + return (len(data) == 2 and + ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and + ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) + + +def surrogatePairToCodepoint(data): + char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + + (ord(data[1]) - 0xDC00)) + return char_val + +# Module Factory Factory (no, this isn't Java, I know) +# Here to stop this being duplicated all over the place. + + +def moduleFactoryFactory(factory): + moduleCache = {} + + def moduleFactory(baseModule, *args, **kwargs): + if isinstance(ModuleType.__name__, type("")): + name = "_%s_factory" % baseModule.__name__ + else: + name = b"_%s_factory" % baseModule.__name__ + + kwargs_tuple = tuple(kwargs.items()) + + try: + return moduleCache[name][args][kwargs_tuple] + except KeyError: + mod = ModuleType(name) + objs = factory(baseModule, *args, **kwargs) + mod.__dict__.update(objs) + if "name" not in moduleCache: + moduleCache[name] = {} + if "args" not in moduleCache[name]: + moduleCache[name][args] = {} + if "kwargs" not in moduleCache[name][args]: + moduleCache[name][args][kwargs_tuple] = {} + moduleCache[name][args][kwargs_tuple] = mod + return mod + + return moduleFactory + + +def memoize(func): + cache = {} + + def wrapped(*args, **kwargs): + key = (tuple(args), tuple(kwargs.items())) + if key not in cache: + cache[key] = func(*args, **kwargs) + return cache[key] + + return wrapped diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/constants.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/constants.py new file mode 100644 index 000000000..fe3e237cd --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/constants.py @@ -0,0 +1,2946 @@ +from __future__ import absolute_import, division, unicode_literals + +import string + +EOF = None + +E = { + "null-character": + "Null character in input stream, replaced with U+FFFD.", + "invalid-codepoint": + "Invalid codepoint in stream.", + "incorrectly-placed-solidus": + "Solidus (/) incorrectly placed in tag.", + "incorrect-cr-newline-entity": + "Incorrect CR newline entity, replaced with LF.", + "illegal-windows-1252-entity": + "Entity used with illegal number 
(windows-1252 reference).", + "cant-convert-numeric-entity": + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", + "illegal-codepoint-for-numeric-entity": + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", + "numeric-entity-without-semicolon": + "Numeric entity didn't end with ';'.", + "expected-numeric-entity-but-got-eof": + "Numeric entity expected. Got end of file instead.", + "expected-numeric-entity": + "Numeric entity expected but none found.", + "named-entity-without-semicolon": + "Named entity didn't end with ';'.", + "expected-named-entity": + "Named entity expected. Got none.", + "attributes-in-end-tag": + "End tag contains unexpected attributes.", + 'self-closing-flag-on-end-tag': + "End tag contains unexpected self-closing flag.", + "expected-tag-name-but-got-right-bracket": + "Expected tag name. Got '>' instead.", + "expected-tag-name-but-got-question-mark": + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", + "expected-tag-name": + "Expected tag name. Got something else instead", + "expected-closing-tag-but-got-right-bracket": + "Expected closing tag. Got '>' instead. Ignoring ''.", + "expected-closing-tag-but-got-eof": + "Expected closing tag. Unexpected end of file.", + "expected-closing-tag-but-got-char": + "Expected closing tag. Unexpected character '%(data)s' found.", + "eof-in-tag-name": + "Unexpected end of file in the tag name.", + "expected-attribute-name-but-got-eof": + "Unexpected end of file. Expected attribute name instead.", + "eof-in-attribute-name": + "Unexpected end of file in attribute name.", + "invalid-character-in-attribute-name": + "Invalid character in attribute name", + "duplicate-attribute": + "Dropped duplicate attribute on tag.", + "expected-end-of-tag-name-but-got-eof": + "Unexpected end of file. Expected = or end of tag.", + "expected-attribute-value-but-got-eof": + "Unexpected end of file. Expected attribute value.", + "expected-attribute-value-but-got-right-bracket": + "Expected attribute value. Got '>' instead.", + 'equals-in-unquoted-attribute-value': + "Unexpected = in unquoted attribute", + 'unexpected-character-in-unquoted-attribute-value': + "Unexpected character in unquoted attribute", + "invalid-character-after-attribute-name": + "Unexpected character after attribute name.", + "unexpected-character-after-attribute-value": + "Unexpected character after attribute value.", + "eof-in-attribute-value-double-quote": + "Unexpected end of file in attribute value (\").", + "eof-in-attribute-value-single-quote": + "Unexpected end of file in attribute value (').", + "eof-in-attribute-value-no-quotes": + "Unexpected end of file in attribute value.", + "unexpected-EOF-after-solidus-in-tag": + "Unexpected end of file in tag. Expected >", + "unexpected-character-after-solidus-in-tag": + "Unexpected character after / in tag. Expected >", + "expected-dashes-or-doctype": + "Expected '--' or 'DOCTYPE'. Not found.", + "unexpected-bang-after-double-dash-in-comment": + "Unexpected ! 
after -- in comment", + "unexpected-space-after-double-dash-in-comment": + "Unexpected space after -- in comment", + "incorrect-comment": + "Incorrect comment.", + "eof-in-comment": + "Unexpected end of file in comment.", + "eof-in-comment-end-dash": + "Unexpected end of file in comment (-)", + "unexpected-dash-after-double-dash-in-comment": + "Unexpected '-' after '--' found in comment.", + "eof-in-comment-double-dash": + "Unexpected end of file in comment (--).", + "eof-in-comment-end-space-state": + "Unexpected end of file in comment.", + "eof-in-comment-end-bang-state": + "Unexpected end of file in comment.", + "unexpected-char-in-comment": + "Unexpected character in comment found.", + "need-space-after-doctype": + "No space after literal string 'DOCTYPE'.", + "expected-doctype-name-but-got-right-bracket": + "Unexpected > character. Expected DOCTYPE name.", + "expected-doctype-name-but-got-eof": + "Unexpected end of file. Expected DOCTYPE name.", + "eof-in-doctype-name": + "Unexpected end of file in DOCTYPE name.", + "eof-in-doctype": + "Unexpected end of file in DOCTYPE.", + "expected-space-or-right-bracket-in-doctype": + "Expected space or '>'. Got '%(data)s'", + "unexpected-end-of-doctype": + "Unexpected end of DOCTYPE.", + "unexpected-char-in-doctype": + "Unexpected character in DOCTYPE.", + "eof-in-innerhtml": + "XXX innerHTML EOF", + "unexpected-doctype": + "Unexpected DOCTYPE. Ignored.", + "non-html-root": + "html needs to be the first start tag.", + "expected-doctype-but-got-eof": + "Unexpected End of file. Expected DOCTYPE.", + "unknown-doctype": + "Erroneous DOCTYPE.", + "expected-doctype-but-got-chars": + "Unexpected non-space characters. Expected DOCTYPE.", + "expected-doctype-but-got-start-tag": + "Unexpected start tag (%(name)s). Expected DOCTYPE.", + "expected-doctype-but-got-end-tag": + "Unexpected end tag (%(name)s). Expected DOCTYPE.", + "end-tag-after-implied-root": + "Unexpected end tag (%(name)s) after the (implied) root element.", + "expected-named-closing-tag-but-got-eof": + "Unexpected end of file. Expected end tag (%(name)s).", + "two-heads-are-not-better-than-one": + "Unexpected start tag head in existing head. Ignored.", + "unexpected-end-tag": + "Unexpected end tag (%(name)s). Ignored.", + "unexpected-start-tag-out-of-my-head": + "Unexpected start tag (%(name)s) that can be in head. Moved.", + "unexpected-start-tag": + "Unexpected start tag (%(name)s).", + "missing-end-tag": + "Missing end tag (%(name)s).", + "missing-end-tags": + "Missing end tags (%(name)s).", + "unexpected-start-tag-implies-end-tag": + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", + "unexpected-start-tag-treated-as": + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", + "deprecated-tag": + "Unexpected start tag %(name)s. Don't use it!", + "unexpected-start-tag-ignored": + "Unexpected start tag %(name)s. Ignored.", + "expected-one-end-tag-but-got-another": + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", + "end-tag-too-early": + "End tag (%(name)s) seen too early. Expected other end tag.", + "end-tag-too-early-named": + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", + "end-tag-too-early-ignored": + "End tag (%(name)s) seen too early. 
Ignored.", + "adoption-agency-1.1": + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", + "adoption-agency-1.2": + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", + "adoption-agency-1.3": + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", + "adoption-agency-4.4": + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", + "unexpected-end-tag-treated-as": + "Unexpected end tag (%(originalName)s). Treated as %(newName)s.", + "no-end-tag": + "This element (%(name)s) has no end tag.", + "unexpected-implied-end-tag-in-table": + "Unexpected implied end tag (%(name)s) in the table phase.", + "unexpected-implied-end-tag-in-table-body": + "Unexpected implied end tag (%(name)s) in the table body phase.", + "unexpected-char-implies-table-voodoo": + "Unexpected non-space characters in " + "table context caused voodoo mode.", + "unexpected-hidden-input-in-table": + "Unexpected input with type hidden in table context.", + "unexpected-form-in-table": + "Unexpected form in table context.", + "unexpected-start-tag-implies-table-voodoo": + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-end-tag-implies-table-voodoo": + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-cell-in-table-body": + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", + "unexpected-cell-end-tag": + "Got table cell end tag (%(name)s) " + "while required end tags are missing.", + "unexpected-end-tag-in-table-body": + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", + "unexpected-implied-end-tag-in-table-row": + "Unexpected implied end tag (%(name)s) in the table row phase.", + "unexpected-end-tag-in-table-row": + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", + "unexpected-select-in-select": + "Unexpected select start tag in the select phase " + "treated as select end tag.", + "unexpected-input-in-select": + "Unexpected input start tag in the select phase.", + "unexpected-start-tag-in-select": + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", + "unexpected-end-tag-in-select": + "Unexpected end tag (%(name)s) in the select phase. Ignored.", + "unexpected-table-element-start-tag-in-select-in-table": + "Unexpected table element start tag (%(name)s) in the select in table phase.", + "unexpected-table-element-end-tag-in-select-in-table": + "Unexpected table element end tag (%(name)s) in the select in table phase.", + "unexpected-char-after-body": + "Unexpected non-space characters in the after body phase.", + "unexpected-start-tag-after-body": + "Unexpected start tag token (%(name)s)" + " in the after body phase.", + "unexpected-end-tag-after-body": + "Unexpected end tag token (%(name)s)" + " in the after body phase.", + "unexpected-char-in-frameset": + "Unexpected characters in the frameset phase. Characters ignored.", + "unexpected-start-tag-in-frameset": + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-frameset-in-frameset-innerhtml": + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", + "unexpected-end-tag-in-frameset": + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-char-after-frameset": + "Unexpected non-space characters in the " + "after frameset phase. 
Ignored.", + "unexpected-start-tag-after-frameset": + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-frameset": + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-body-innerhtml": + "Unexpected end tag after body(innerHtml)", + "expected-eof-but-got-char": + "Unexpected non-space characters. Expected end of file.", + "expected-eof-but-got-start-tag": + "Unexpected start tag (%(name)s)" + ". Expected end of file.", + "expected-eof-but-got-end-tag": + "Unexpected end tag (%(name)s)" + ". Expected end of file.", + "eof-in-table": + "Unexpected end of file. Expected table content.", + "eof-in-select": + "Unexpected end of file. Expected select content.", + "eof-in-frameset": + "Unexpected end of file. Expected frameset content.", + "eof-in-script-in-script": + "Unexpected end of file. Expected script content.", + "eof-in-foreign-lands": + "Unexpected end of file. Expected foreign content", + "non-void-element-with-trailing-solidus": + "Trailing solidus not allowed on element %(name)s", + "unexpected-html-element-in-foreign-content": + "Element %(name)s not allowed in a non-html context", + "unexpected-end-tag-before-html": + "Unexpected end tag (%(name)s) before html.", + "unexpected-inhead-noscript-tag": + "Element %(name)s not allowed in a inhead-noscript context", + "eof-in-head-noscript": + "Unexpected end of file. Expected inhead-noscript content", + "char-in-head-noscript": + "Unexpected non-space character. Expected inhead-noscript content", + "XXX-undefined-error": + "Undefined error (this sucks and should be fixed)", +} + +namespaces = { + "html": "http://www.w3.org/1999/xhtml", + "mathml": "http://www.w3.org/1998/Math/MathML", + "svg": "http://www.w3.org/2000/svg", + "xlink": "http://www.w3.org/1999/xlink", + "xml": "http://www.w3.org/XML/1998/namespace", + "xmlns": "http://www.w3.org/2000/xmlns/" +} + +scopingElements = frozenset([ + (namespaces["html"], "applet"), + (namespaces["html"], "caption"), + (namespaces["html"], "html"), + (namespaces["html"], "marquee"), + (namespaces["html"], "object"), + (namespaces["html"], "table"), + (namespaces["html"], "td"), + (namespaces["html"], "th"), + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext"), + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title"), +]) + +formattingElements = frozenset([ + (namespaces["html"], "a"), + (namespaces["html"], "b"), + (namespaces["html"], "big"), + (namespaces["html"], "code"), + (namespaces["html"], "em"), + (namespaces["html"], "font"), + (namespaces["html"], "i"), + (namespaces["html"], "nobr"), + (namespaces["html"], "s"), + (namespaces["html"], "small"), + (namespaces["html"], "strike"), + (namespaces["html"], "strong"), + (namespaces["html"], "tt"), + (namespaces["html"], "u") +]) + +specialElements = frozenset([ + (namespaces["html"], "address"), + (namespaces["html"], "applet"), + (namespaces["html"], "area"), + (namespaces["html"], "article"), + (namespaces["html"], "aside"), + (namespaces["html"], "base"), + (namespaces["html"], "basefont"), + (namespaces["html"], "bgsound"), + (namespaces["html"], "blockquote"), + (namespaces["html"], "body"), + (namespaces["html"], "br"), + (namespaces["html"], "button"), + (namespaces["html"], "caption"), + (namespaces["html"], "center"), + 
(namespaces["html"], "col"), + (namespaces["html"], "colgroup"), + (namespaces["html"], "command"), + (namespaces["html"], "dd"), + (namespaces["html"], "details"), + (namespaces["html"], "dir"), + (namespaces["html"], "div"), + (namespaces["html"], "dl"), + (namespaces["html"], "dt"), + (namespaces["html"], "embed"), + (namespaces["html"], "fieldset"), + (namespaces["html"], "figure"), + (namespaces["html"], "footer"), + (namespaces["html"], "form"), + (namespaces["html"], "frame"), + (namespaces["html"], "frameset"), + (namespaces["html"], "h1"), + (namespaces["html"], "h2"), + (namespaces["html"], "h3"), + (namespaces["html"], "h4"), + (namespaces["html"], "h5"), + (namespaces["html"], "h6"), + (namespaces["html"], "head"), + (namespaces["html"], "header"), + (namespaces["html"], "hr"), + (namespaces["html"], "html"), + (namespaces["html"], "iframe"), + # Note that image is commented out in the spec as "this isn't an + # element that can end up on the stack, so it doesn't matter," + (namespaces["html"], "image"), + (namespaces["html"], "img"), + (namespaces["html"], "input"), + (namespaces["html"], "isindex"), + (namespaces["html"], "li"), + (namespaces["html"], "link"), + (namespaces["html"], "listing"), + (namespaces["html"], "marquee"), + (namespaces["html"], "menu"), + (namespaces["html"], "meta"), + (namespaces["html"], "nav"), + (namespaces["html"], "noembed"), + (namespaces["html"], "noframes"), + (namespaces["html"], "noscript"), + (namespaces["html"], "object"), + (namespaces["html"], "ol"), + (namespaces["html"], "p"), + (namespaces["html"], "param"), + (namespaces["html"], "plaintext"), + (namespaces["html"], "pre"), + (namespaces["html"], "script"), + (namespaces["html"], "section"), + (namespaces["html"], "select"), + (namespaces["html"], "style"), + (namespaces["html"], "table"), + (namespaces["html"], "tbody"), + (namespaces["html"], "td"), + (namespaces["html"], "textarea"), + (namespaces["html"], "tfoot"), + (namespaces["html"], "th"), + (namespaces["html"], "thead"), + (namespaces["html"], "title"), + (namespaces["html"], "tr"), + (namespaces["html"], "ul"), + (namespaces["html"], "wbr"), + (namespaces["html"], "xmp"), + (namespaces["svg"], "foreignObject") +]) + +htmlIntegrationPointElements = frozenset([ + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title") +]) + +mathmlTextIntegrationPointElements = frozenset([ + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext") +]) + +adjustSVGAttributes = { + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "contentscripttype": "contentScriptType", + "contentstyletype": "contentStyleType", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "externalresourcesrequired": "externalResourcesRequired", + "filterres": "filterRes", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + 
"markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": "primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan" +} + +adjustMathMLAttributes = {"definitionurl": "definitionURL"} + +adjustForeignAttributes = { + "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), + "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), + "xlink:href": ("xlink", "href", namespaces["xlink"]), + "xlink:role": ("xlink", "role", namespaces["xlink"]), + "xlink:show": ("xlink", "show", namespaces["xlink"]), + "xlink:title": ("xlink", "title", namespaces["xlink"]), + "xlink:type": ("xlink", "type", namespaces["xlink"]), + "xml:base": ("xml", "base", namespaces["xml"]), + "xml:lang": ("xml", "lang", namespaces["xml"]), + "xml:space": ("xml", "space", namespaces["xml"]), + "xmlns": (None, "xmlns", namespaces["xmlns"]), + "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) +} + +unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in + adjustForeignAttributes.items()} + +spaceCharacters = frozenset([ + "\t", + "\n", + "\u000C", + " ", + "\r" +]) + +tableInsertModeElements = frozenset([ + "table", + "tbody", + "tfoot", + "thead", + "tr" +]) + +asciiLowercase = frozenset(string.ascii_lowercase) +asciiUppercase = frozenset(string.ascii_uppercase) +asciiLetters = frozenset(string.ascii_letters) +digits = frozenset(string.digits) +hexDigits = frozenset(string.hexdigits) + +asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase} + +# Heading elements need to be ordered +headingElements = ( + "h1", + "h2", + "h3", + "h4", + "h5", + "h6" +) + +voidElements = frozenset([ + "base", + "command", + "event-source", + "link", + "meta", + "hr", + "br", + "img", + "embed", + "param", + "area", + "col", + "input", + "source", + "track" +]) + +cdataElements = frozenset(['title', 'textarea']) + +rcdataElements = frozenset([ + 'style', + 'script', + 'xmp', + 'iframe', + 'noembed', + 'noframes', + 'noscript' +]) + +booleanAttributes = { + "": frozenset(["irrelevant", "itemscope"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), + "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + 
"menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), + "iframe": frozenset(["seamless"]), +} + +# entitiesWindows1252 has to be _ordered_ and needs to have an index. It +# therefore can't be a frozenset. +entitiesWindows1252 = ( + 8364, # 0x80 0x20AC EURO SIGN + 65533, # 0x81 UNDEFINED + 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK + 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK + 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK + 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS + 8224, # 0x86 0x2020 DAGGER + 8225, # 0x87 0x2021 DOUBLE DAGGER + 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT + 8240, # 0x89 0x2030 PER MILLE SIGN + 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON + 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK + 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE + 65533, # 0x8D UNDEFINED + 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON + 65533, # 0x8F UNDEFINED + 65533, # 0x90 UNDEFINED + 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK + 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK + 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK + 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK + 8226, # 0x95 0x2022 BULLET + 8211, # 0x96 0x2013 EN DASH + 8212, # 0x97 0x2014 EM DASH + 732, # 0x98 0x02DC SMALL TILDE + 8482, # 0x99 0x2122 TRADE MARK SIGN + 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON + 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE + 65533, # 0x9D UNDEFINED + 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON + 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS +) + +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) + +entities = { + "AElig": "\xc6", + "AElig;": "\xc6", + "AMP": "&", + "AMP;": "&", + "Aacute": "\xc1", + "Aacute;": "\xc1", + "Abreve;": "\u0102", + "Acirc": "\xc2", + "Acirc;": "\xc2", + "Acy;": "\u0410", + "Afr;": "\U0001d504", + "Agrave": "\xc0", + "Agrave;": "\xc0", + "Alpha;": "\u0391", + "Amacr;": "\u0100", + "And;": "\u2a53", + "Aogon;": "\u0104", + "Aopf;": "\U0001d538", + "ApplyFunction;": "\u2061", + "Aring": "\xc5", + "Aring;": "\xc5", + "Ascr;": "\U0001d49c", + "Assign;": "\u2254", + "Atilde": "\xc3", + "Atilde;": "\xc3", + "Auml": "\xc4", + "Auml;": "\xc4", + "Backslash;": "\u2216", + "Barv;": "\u2ae7", + "Barwed;": "\u2306", + "Bcy;": "\u0411", + "Because;": "\u2235", + "Bernoullis;": "\u212c", + "Beta;": "\u0392", + "Bfr;": "\U0001d505", + "Bopf;": "\U0001d539", + "Breve;": "\u02d8", + "Bscr;": "\u212c", + "Bumpeq;": "\u224e", + "CHcy;": "\u0427", + "COPY": "\xa9", + "COPY;": "\xa9", + "Cacute;": "\u0106", + "Cap;": "\u22d2", + "CapitalDifferentialD;": "\u2145", + "Cayleys;": "\u212d", + "Ccaron;": "\u010c", + "Ccedil": "\xc7", + "Ccedil;": "\xc7", + "Ccirc;": "\u0108", + "Cconint;": "\u2230", + "Cdot;": "\u010a", + "Cedilla;": "\xb8", + "CenterDot;": "\xb7", + "Cfr;": "\u212d", + "Chi;": "\u03a7", + "CircleDot;": "\u2299", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201d", + "CloseCurlyQuote;": "\u2019", + "Colon;": "\u2237", 
+ "Colone;": "\u2a74", + "Congruent;": "\u2261", + "Conint;": "\u222f", + "ContourIntegral;": "\u222e", + "Copf;": "\u2102", + "Coproduct;": "\u2210", + "CounterClockwiseContourIntegral;": "\u2233", + "Cross;": "\u2a2f", + "Cscr;": "\U0001d49e", + "Cup;": "\u22d3", + "CupCap;": "\u224d", + "DD;": "\u2145", + "DDotrahd;": "\u2911", + "DJcy;": "\u0402", + "DScy;": "\u0405", + "DZcy;": "\u040f", + "Dagger;": "\u2021", + "Darr;": "\u21a1", + "Dashv;": "\u2ae4", + "Dcaron;": "\u010e", + "Dcy;": "\u0414", + "Del;": "\u2207", + "Delta;": "\u0394", + "Dfr;": "\U0001d507", + "DiacriticalAcute;": "\xb4", + "DiacriticalDot;": "\u02d9", + "DiacriticalDoubleAcute;": "\u02dd", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02dc", + "Diamond;": "\u22c4", + "DifferentialD;": "\u2146", + "Dopf;": "\U0001d53b", + "Dot;": "\xa8", + "DotDot;": "\u20dc", + "DotEqual;": "\u2250", + "DoubleContourIntegral;": "\u222f", + "DoubleDot;": "\xa8", + "DoubleDownArrow;": "\u21d3", + "DoubleLeftArrow;": "\u21d0", + "DoubleLeftRightArrow;": "\u21d4", + "DoubleLeftTee;": "\u2ae4", + "DoubleLongLeftArrow;": "\u27f8", + "DoubleLongLeftRightArrow;": "\u27fa", + "DoubleLongRightArrow;": "\u27f9", + "DoubleRightArrow;": "\u21d2", + "DoubleRightTee;": "\u22a8", + "DoubleUpArrow;": "\u21d1", + "DoubleUpDownArrow;": "\u21d5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21f5", + "DownBreve;": "\u0311", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295e", + "DownLeftVector;": "\u21bd", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295f", + "DownRightVector;": "\u21c1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22a4", + "DownTeeArrow;": "\u21a7", + "Downarrow;": "\u21d3", + "Dscr;": "\U0001d49f", + "Dstrok;": "\u0110", + "ENG;": "\u014a", + "ETH": "\xd0", + "ETH;": "\xd0", + "Eacute": "\xc9", + "Eacute;": "\xc9", + "Ecaron;": "\u011a", + "Ecirc": "\xca", + "Ecirc;": "\xca", + "Ecy;": "\u042d", + "Edot;": "\u0116", + "Efr;": "\U0001d508", + "Egrave": "\xc8", + "Egrave;": "\xc8", + "Element;": "\u2208", + "Emacr;": "\u0112", + "EmptySmallSquare;": "\u25fb", + "EmptyVerySmallSquare;": "\u25ab", + "Eogon;": "\u0118", + "Eopf;": "\U0001d53c", + "Epsilon;": "\u0395", + "Equal;": "\u2a75", + "EqualTilde;": "\u2242", + "Equilibrium;": "\u21cc", + "Escr;": "\u2130", + "Esim;": "\u2a73", + "Eta;": "\u0397", + "Euml": "\xcb", + "Euml;": "\xcb", + "Exists;": "\u2203", + "ExponentialE;": "\u2147", + "Fcy;": "\u0424", + "Ffr;": "\U0001d509", + "FilledSmallSquare;": "\u25fc", + "FilledVerySmallSquare;": "\u25aa", + "Fopf;": "\U0001d53d", + "ForAll;": "\u2200", + "Fouriertrf;": "\u2131", + "Fscr;": "\u2131", + "GJcy;": "\u0403", + "GT": ">", + "GT;": ">", + "Gamma;": "\u0393", + "Gammad;": "\u03dc", + "Gbreve;": "\u011e", + "Gcedil;": "\u0122", + "Gcirc;": "\u011c", + "Gcy;": "\u0413", + "Gdot;": "\u0120", + "Gfr;": "\U0001d50a", + "Gg;": "\u22d9", + "Gopf;": "\U0001d53e", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22db", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2aa2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2a7e", + "GreaterTilde;": "\u2273", + "Gscr;": "\U0001d4a2", + "Gt;": "\u226b", + "HARDcy;": "\u042a", + "Hacek;": "\u02c7", + "Hat;": "^", + "Hcirc;": "\u0124", + "Hfr;": "\u210c", + "HilbertSpace;": "\u210b", + "Hopf;": "\u210d", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210b", + "Hstrok;": "\u0126", + "HumpDownHump;": "\u224e", + "HumpEqual;": "\u224f", + "IEcy;": "\u0415", + 
"IJlig;": "\u0132", + "IOcy;": "\u0401", + "Iacute": "\xcd", + "Iacute;": "\xcd", + "Icirc": "\xce", + "Icirc;": "\xce", + "Icy;": "\u0418", + "Idot;": "\u0130", + "Ifr;": "\u2111", + "Igrave": "\xcc", + "Igrave;": "\xcc", + "Im;": "\u2111", + "Imacr;": "\u012a", + "ImaginaryI;": "\u2148", + "Implies;": "\u21d2", + "Int;": "\u222c", + "Integral;": "\u222b", + "Intersection;": "\u22c2", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "Iogon;": "\u012e", + "Iopf;": "\U0001d540", + "Iota;": "\u0399", + "Iscr;": "\u2110", + "Itilde;": "\u0128", + "Iukcy;": "\u0406", + "Iuml": "\xcf", + "Iuml;": "\xcf", + "Jcirc;": "\u0134", + "Jcy;": "\u0419", + "Jfr;": "\U0001d50d", + "Jopf;": "\U0001d541", + "Jscr;": "\U0001d4a5", + "Jsercy;": "\u0408", + "Jukcy;": "\u0404", + "KHcy;": "\u0425", + "KJcy;": "\u040c", + "Kappa;": "\u039a", + "Kcedil;": "\u0136", + "Kcy;": "\u041a", + "Kfr;": "\U0001d50e", + "Kopf;": "\U0001d542", + "Kscr;": "\U0001d4a6", + "LJcy;": "\u0409", + "LT": "<", + "LT;": "<", + "Lacute;": "\u0139", + "Lambda;": "\u039b", + "Lang;": "\u27ea", + "Laplacetrf;": "\u2112", + "Larr;": "\u219e", + "Lcaron;": "\u013d", + "Lcedil;": "\u013b", + "Lcy;": "\u041b", + "LeftAngleBracket;": "\u27e8", + "LeftArrow;": "\u2190", + "LeftArrowBar;": "\u21e4", + "LeftArrowRightArrow;": "\u21c6", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27e6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21c3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230a", + "LeftRightArrow;": "\u2194", + "LeftRightVector;": "\u294e", + "LeftTee;": "\u22a3", + "LeftTeeArrow;": "\u21a4", + "LeftTeeVector;": "\u295a", + "LeftTriangle;": "\u22b2", + "LeftTriangleBar;": "\u29cf", + "LeftTriangleEqual;": "\u22b4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21bf", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21bc", + "LeftVectorBar;": "\u2952", + "Leftarrow;": "\u21d0", + "Leftrightarrow;": "\u21d4", + "LessEqualGreater;": "\u22da", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "LessLess;": "\u2aa1", + "LessSlantEqual;": "\u2a7d", + "LessTilde;": "\u2272", + "Lfr;": "\U0001d50f", + "Ll;": "\u22d8", + "Lleftarrow;": "\u21da", + "Lmidot;": "\u013f", + "LongLeftArrow;": "\u27f5", + "LongLeftRightArrow;": "\u27f7", + "LongRightArrow;": "\u27f6", + "Longleftarrow;": "\u27f8", + "Longleftrightarrow;": "\u27fa", + "Longrightarrow;": "\u27f9", + "Lopf;": "\U0001d543", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "Lscr;": "\u2112", + "Lsh;": "\u21b0", + "Lstrok;": "\u0141", + "Lt;": "\u226a", + "Map;": "\u2905", + "Mcy;": "\u041c", + "MediumSpace;": "\u205f", + "Mellintrf;": "\u2133", + "Mfr;": "\U0001d510", + "MinusPlus;": "\u2213", + "Mopf;": "\U0001d544", + "Mscr;": "\u2133", + "Mu;": "\u039c", + "NJcy;": "\u040a", + "Nacute;": "\u0143", + "Ncaron;": "\u0147", + "Ncedil;": "\u0145", + "Ncy;": "\u041d", + "NegativeMediumSpace;": "\u200b", + "NegativeThickSpace;": "\u200b", + "NegativeThinSpace;": "\u200b", + "NegativeVeryThinSpace;": "\u200b", + "NestedGreaterGreater;": "\u226b", + "NestedLessLess;": "\u226a", + "NewLine;": "\n", + "Nfr;": "\U0001d511", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\xa0", + "Nopf;": "\u2115", + "Not;": "\u2aec", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226d", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226f", + "NotGreaterEqual;": "\u2271", + 
"NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226b\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2a7e\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224e\u0338", + "NotHumpEqual;": "\u224f\u0338", + "NotLeftTriangle;": "\u22ea", + "NotLeftTriangleBar;": "\u29cf\u0338", + "NotLeftTriangleEqual;": "\u22ec", + "NotLess;": "\u226e", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226a\u0338", + "NotLessSlantEqual;": "\u2a7d\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2aa2\u0338", + "NotNestedLessLess;": "\u2aa1\u0338", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2aaf\u0338", + "NotPrecedesSlantEqual;": "\u22e0", + "NotReverseElement;": "\u220c", + "NotRightTriangle;": "\u22eb", + "NotRightTriangleBar;": "\u29d0\u0338", + "NotRightTriangleEqual;": "\u22ed", + "NotSquareSubset;": "\u228f\u0338", + "NotSquareSubsetEqual;": "\u22e2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22e3", + "NotSubset;": "\u2282\u20d2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2ab0\u0338", + "NotSucceedsSlantEqual;": "\u22e1", + "NotSucceedsTilde;": "\u227f\u0338", + "NotSuperset;": "\u2283\u20d2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "Nscr;": "\U0001d4a9", + "Ntilde": "\xd1", + "Ntilde;": "\xd1", + "Nu;": "\u039d", + "OElig;": "\u0152", + "Oacute": "\xd3", + "Oacute;": "\xd3", + "Ocirc": "\xd4", + "Ocirc;": "\xd4", + "Ocy;": "\u041e", + "Odblac;": "\u0150", + "Ofr;": "\U0001d512", + "Ograve": "\xd2", + "Ograve;": "\xd2", + "Omacr;": "\u014c", + "Omega;": "\u03a9", + "Omicron;": "\u039f", + "Oopf;": "\U0001d546", + "OpenCurlyDoubleQuote;": "\u201c", + "OpenCurlyQuote;": "\u2018", + "Or;": "\u2a54", + "Oscr;": "\U0001d4aa", + "Oslash": "\xd8", + "Oslash;": "\xd8", + "Otilde": "\xd5", + "Otilde;": "\xd5", + "Otimes;": "\u2a37", + "Ouml": "\xd6", + "Ouml;": "\xd6", + "OverBar;": "\u203e", + "OverBrace;": "\u23de", + "OverBracket;": "\u23b4", + "OverParenthesis;": "\u23dc", + "PartialD;": "\u2202", + "Pcy;": "\u041f", + "Pfr;": "\U0001d513", + "Phi;": "\u03a6", + "Pi;": "\u03a0", + "PlusMinus;": "\xb1", + "Poincareplane;": "\u210c", + "Popf;": "\u2119", + "Pr;": "\u2abb", + "Precedes;": "\u227a", + "PrecedesEqual;": "\u2aaf", + "PrecedesSlantEqual;": "\u227c", + "PrecedesTilde;": "\u227e", + "Prime;": "\u2033", + "Product;": "\u220f", + "Proportion;": "\u2237", + "Proportional;": "\u221d", + "Pscr;": "\U0001d4ab", + "Psi;": "\u03a8", + "QUOT": "\"", + "QUOT;": "\"", + "Qfr;": "\U0001d514", + "Qopf;": "\u211a", + "Qscr;": "\U0001d4ac", + "RBarr;": "\u2910", + "REG": "\xae", + "REG;": "\xae", + "Racute;": "\u0154", + "Rang;": "\u27eb", + "Rarr;": "\u21a0", + "Rarrtl;": "\u2916", + "Rcaron;": "\u0158", + "Rcedil;": "\u0156", + "Rcy;": "\u0420", + "Re;": "\u211c", + "ReverseElement;": "\u220b", + "ReverseEquilibrium;": "\u21cb", + "ReverseUpEquilibrium;": "\u296f", + "Rfr;": "\u211c", + "Rho;": "\u03a1", + "RightAngleBracket;": "\u27e9", + "RightArrow;": "\u2192", + "RightArrowBar;": "\u21e5", + "RightArrowLeftArrow;": "\u21c4", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27e7", + "RightDownTeeVector;": "\u295d", + "RightDownVector;": "\u21c2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230b", + "RightTee;": "\u22a2", + "RightTeeArrow;": "\u21a6", + 
"RightTeeVector;": "\u295b", + "RightTriangle;": "\u22b3", + "RightTriangleBar;": "\u29d0", + "RightTriangleEqual;": "\u22b5", + "RightUpDownVector;": "\u294f", + "RightUpTeeVector;": "\u295c", + "RightUpVector;": "\u21be", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21c0", + "RightVectorBar;": "\u2953", + "Rightarrow;": "\u21d2", + "Ropf;": "\u211d", + "RoundImplies;": "\u2970", + "Rrightarrow;": "\u21db", + "Rscr;": "\u211b", + "Rsh;": "\u21b1", + "RuleDelayed;": "\u29f4", + "SHCHcy;": "\u0429", + "SHcy;": "\u0428", + "SOFTcy;": "\u042c", + "Sacute;": "\u015a", + "Sc;": "\u2abc", + "Scaron;": "\u0160", + "Scedil;": "\u015e", + "Scirc;": "\u015c", + "Scy;": "\u0421", + "Sfr;": "\U0001d516", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "Sigma;": "\u03a3", + "SmallCircle;": "\u2218", + "Sopf;": "\U0001d54a", + "Sqrt;": "\u221a", + "Square;": "\u25a1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228f", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "Sscr;": "\U0001d4ae", + "Star;": "\u22c6", + "Sub;": "\u22d0", + "Subset;": "\u22d0", + "SubsetEqual;": "\u2286", + "Succeeds;": "\u227b", + "SucceedsEqual;": "\u2ab0", + "SucceedsSlantEqual;": "\u227d", + "SucceedsTilde;": "\u227f", + "SuchThat;": "\u220b", + "Sum;": "\u2211", + "Sup;": "\u22d1", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "Supset;": "\u22d1", + "THORN": "\xde", + "THORN;": "\xde", + "TRADE;": "\u2122", + "TSHcy;": "\u040b", + "TScy;": "\u0426", + "Tab;": "\t", + "Tau;": "\u03a4", + "Tcaron;": "\u0164", + "Tcedil;": "\u0162", + "Tcy;": "\u0422", + "Tfr;": "\U0001d517", + "Therefore;": "\u2234", + "Theta;": "\u0398", + "ThickSpace;": "\u205f\u200a", + "ThinSpace;": "\u2009", + "Tilde;": "\u223c", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "Topf;": "\U0001d54b", + "TripleDot;": "\u20db", + "Tscr;": "\U0001d4af", + "Tstrok;": "\u0166", + "Uacute": "\xda", + "Uacute;": "\xda", + "Uarr;": "\u219f", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040e", + "Ubreve;": "\u016c", + "Ucirc": "\xdb", + "Ucirc;": "\xdb", + "Ucy;": "\u0423", + "Udblac;": "\u0170", + "Ufr;": "\U0001d518", + "Ugrave": "\xd9", + "Ugrave;": "\xd9", + "Umacr;": "\u016a", + "UnderBar;": "_", + "UnderBrace;": "\u23df", + "UnderBracket;": "\u23b5", + "UnderParenthesis;": "\u23dd", + "Union;": "\u22c3", + "UnionPlus;": "\u228e", + "Uogon;": "\u0172", + "Uopf;": "\U0001d54c", + "UpArrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21c5", + "UpDownArrow;": "\u2195", + "UpEquilibrium;": "\u296e", + "UpTee;": "\u22a5", + "UpTeeArrow;": "\u21a5", + "Uparrow;": "\u21d1", + "Updownarrow;": "\u21d5", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03d2", + "Upsilon;": "\u03a5", + "Uring;": "\u016e", + "Uscr;": "\U0001d4b0", + "Utilde;": "\u0168", + "Uuml": "\xdc", + "Uuml;": "\xdc", + "VDash;": "\u22ab", + "Vbar;": "\u2aeb", + "Vcy;": "\u0412", + "Vdash;": "\u22a9", + "Vdashl;": "\u2ae6", + "Vee;": "\u22c1", + "Verbar;": "\u2016", + "Vert;": "\u2016", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200a", + "Vfr;": "\U0001d519", + "Vopf;": "\U0001d54d", + "Vscr;": "\U0001d4b1", + "Vvdash;": "\u22aa", + "Wcirc;": "\u0174", + "Wedge;": "\u22c0", + "Wfr;": "\U0001d51a", + "Wopf;": "\U0001d54e", + "Wscr;": 
"\U0001d4b2", + "Xfr;": "\U0001d51b", + "Xi;": "\u039e", + "Xopf;": "\U0001d54f", + "Xscr;": "\U0001d4b3", + "YAcy;": "\u042f", + "YIcy;": "\u0407", + "YUcy;": "\u042e", + "Yacute": "\xdd", + "Yacute;": "\xdd", + "Ycirc;": "\u0176", + "Ycy;": "\u042b", + "Yfr;": "\U0001d51c", + "Yopf;": "\U0001d550", + "Yscr;": "\U0001d4b4", + "Yuml;": "\u0178", + "ZHcy;": "\u0416", + "Zacute;": "\u0179", + "Zcaron;": "\u017d", + "Zcy;": "\u0417", + "Zdot;": "\u017b", + "ZeroWidthSpace;": "\u200b", + "Zeta;": "\u0396", + "Zfr;": "\u2128", + "Zopf;": "\u2124", + "Zscr;": "\U0001d4b5", + "aacute": "\xe1", + "aacute;": "\xe1", + "abreve;": "\u0103", + "ac;": "\u223e", + "acE;": "\u223e\u0333", + "acd;": "\u223f", + "acirc": "\xe2", + "acirc;": "\xe2", + "acute": "\xb4", + "acute;": "\xb4", + "acy;": "\u0430", + "aelig": "\xe6", + "aelig;": "\xe6", + "af;": "\u2061", + "afr;": "\U0001d51e", + "agrave": "\xe0", + "agrave;": "\xe0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "alpha;": "\u03b1", + "amacr;": "\u0101", + "amalg;": "\u2a3f", + "amp": "&", + "amp;": "&", + "and;": "\u2227", + "andand;": "\u2a55", + "andd;": "\u2a5c", + "andslope;": "\u2a58", + "andv;": "\u2a5a", + "ang;": "\u2220", + "ange;": "\u29a4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29a8", + "angmsdab;": "\u29a9", + "angmsdac;": "\u29aa", + "angmsdad;": "\u29ab", + "angmsdae;": "\u29ac", + "angmsdaf;": "\u29ad", + "angmsdag;": "\u29ae", + "angmsdah;": "\u29af", + "angrt;": "\u221f", + "angrtvb;": "\u22be", + "angrtvbd;": "\u299d", + "angsph;": "\u2222", + "angst;": "\xc5", + "angzarr;": "\u237c", + "aogon;": "\u0105", + "aopf;": "\U0001d552", + "ap;": "\u2248", + "apE;": "\u2a70", + "apacir;": "\u2a6f", + "ape;": "\u224a", + "apid;": "\u224b", + "apos;": "'", + "approx;": "\u2248", + "approxeq;": "\u224a", + "aring": "\xe5", + "aring;": "\xe5", + "ascr;": "\U0001d4b6", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224d", + "atilde": "\xe3", + "atilde;": "\xe3", + "auml": "\xe4", + "auml;": "\xe4", + "awconint;": "\u2233", + "awint;": "\u2a11", + "bNot;": "\u2aed", + "backcong;": "\u224c", + "backepsilon;": "\u03f6", + "backprime;": "\u2035", + "backsim;": "\u223d", + "backsimeq;": "\u22cd", + "barvee;": "\u22bd", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23b5", + "bbrktbrk;": "\u23b6", + "bcong;": "\u224c", + "bcy;": "\u0431", + "bdquo;": "\u201e", + "becaus;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29b0", + "bepsi;": "\u03f6", + "bernou;": "\u212c", + "beta;": "\u03b2", + "beth;": "\u2136", + "between;": "\u226c", + "bfr;": "\U0001d51f", + "bigcap;": "\u22c2", + "bigcirc;": "\u25ef", + "bigcup;": "\u22c3", + "bigodot;": "\u2a00", + "bigoplus;": "\u2a01", + "bigotimes;": "\u2a02", + "bigsqcup;": "\u2a06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25bd", + "bigtriangleup;": "\u25b3", + "biguplus;": "\u2a04", + "bigvee;": "\u22c1", + "bigwedge;": "\u22c0", + "bkarow;": "\u290d", + "blacklozenge;": "\u29eb", + "blacksquare;": "\u25aa", + "blacktriangle;": "\u25b4", + "blacktriangledown;": "\u25be", + "blacktriangleleft;": "\u25c2", + "blacktriangleright;": "\u25b8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20e5", + "bnequiv;": "\u2261\u20e5", + "bnot;": "\u2310", + "bopf;": "\U0001d553", + "bot;": "\u22a5", + "bottom;": "\u22a5", + "bowtie;": "\u22c8", + "boxDL;": "\u2557", + "boxDR;": "\u2554", + "boxDl;": "\u2556", + "boxDr;": "\u2553", + "boxH;": "\u2550", + "boxHD;": "\u2566", + 
"boxHU;": "\u2569", + "boxHd;": "\u2564", + "boxHu;": "\u2567", + "boxUL;": "\u255d", + "boxUR;": "\u255a", + "boxUl;": "\u255c", + "boxUr;": "\u2559", + "boxV;": "\u2551", + "boxVH;": "\u256c", + "boxVL;": "\u2563", + "boxVR;": "\u2560", + "boxVh;": "\u256b", + "boxVl;": "\u2562", + "boxVr;": "\u255f", + "boxbox;": "\u29c9", + "boxdL;": "\u2555", + "boxdR;": "\u2552", + "boxdl;": "\u2510", + "boxdr;": "\u250c", + "boxh;": "\u2500", + "boxhD;": "\u2565", + "boxhU;": "\u2568", + "boxhd;": "\u252c", + "boxhu;": "\u2534", + "boxminus;": "\u229f", + "boxplus;": "\u229e", + "boxtimes;": "\u22a0", + "boxuL;": "\u255b", + "boxuR;": "\u2558", + "boxul;": "\u2518", + "boxur;": "\u2514", + "boxv;": "\u2502", + "boxvH;": "\u256a", + "boxvL;": "\u2561", + "boxvR;": "\u255e", + "boxvh;": "\u253c", + "boxvl;": "\u2524", + "boxvr;": "\u251c", + "bprime;": "\u2035", + "breve;": "\u02d8", + "brvbar": "\xa6", + "brvbar;": "\xa6", + "bscr;": "\U0001d4b7", + "bsemi;": "\u204f", + "bsim;": "\u223d", + "bsime;": "\u22cd", + "bsol;": "\\", + "bsolb;": "\u29c5", + "bsolhsub;": "\u27c8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224e", + "bumpE;": "\u2aae", + "bumpe;": "\u224f", + "bumpeq;": "\u224f", + "cacute;": "\u0107", + "cap;": "\u2229", + "capand;": "\u2a44", + "capbrcup;": "\u2a49", + "capcap;": "\u2a4b", + "capcup;": "\u2a47", + "capdot;": "\u2a40", + "caps;": "\u2229\ufe00", + "caret;": "\u2041", + "caron;": "\u02c7", + "ccaps;": "\u2a4d", + "ccaron;": "\u010d", + "ccedil": "\xe7", + "ccedil;": "\xe7", + "ccirc;": "\u0109", + "ccups;": "\u2a4c", + "ccupssm;": "\u2a50", + "cdot;": "\u010b", + "cedil": "\xb8", + "cedil;": "\xb8", + "cemptyv;": "\u29b2", + "cent": "\xa2", + "cent;": "\xa2", + "centerdot;": "\xb7", + "cfr;": "\U0001d520", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "chi;": "\u03c7", + "cir;": "\u25cb", + "cirE;": "\u29c3", + "circ;": "\u02c6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21ba", + "circlearrowright;": "\u21bb", + "circledR;": "\xae", + "circledS;": "\u24c8", + "circledast;": "\u229b", + "circledcirc;": "\u229a", + "circleddash;": "\u229d", + "cire;": "\u2257", + "cirfnint;": "\u2a10", + "cirmid;": "\u2aef", + "cirscir;": "\u29c2", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "colon;": ":", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2a6d", + "conint;": "\u222e", + "copf;": "\U0001d554", + "coprod;": "\u2210", + "copy": "\xa9", + "copy;": "\xa9", + "copysr;": "\u2117", + "crarr;": "\u21b5", + "cross;": "\u2717", + "cscr;": "\U0001d4b8", + "csub;": "\u2acf", + "csube;": "\u2ad1", + "csup;": "\u2ad0", + "csupe;": "\u2ad2", + "ctdot;": "\u22ef", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22de", + "cuesc;": "\u22df", + "cularr;": "\u21b6", + "cularrp;": "\u293d", + "cup;": "\u222a", + "cupbrcap;": "\u2a48", + "cupcap;": "\u2a46", + "cupcup;": "\u2a4a", + "cupdot;": "\u228d", + "cupor;": "\u2a45", + "cups;": "\u222a\ufe00", + "curarr;": "\u21b7", + "curarrm;": "\u293c", + "curlyeqprec;": "\u22de", + "curlyeqsucc;": "\u22df", + "curlyvee;": "\u22ce", + "curlywedge;": "\u22cf", + "curren": "\xa4", + "curren;": "\xa4", + "curvearrowleft;": "\u21b6", + "curvearrowright;": "\u21b7", + "cuvee;": "\u22ce", + "cuwed;": "\u22cf", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232d", + "dArr;": "\u21d3", + "dHar;": "\u2965", + "dagger;": 
"\u2020", + "daleth;": "\u2138", + "darr;": "\u2193", + "dash;": "\u2010", + "dashv;": "\u22a3", + "dbkarow;": "\u290f", + "dblac;": "\u02dd", + "dcaron;": "\u010f", + "dcy;": "\u0434", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21ca", + "ddotseq;": "\u2a77", + "deg": "\xb0", + "deg;": "\xb0", + "delta;": "\u03b4", + "demptyv;": "\u29b1", + "dfisht;": "\u297f", + "dfr;": "\U0001d521", + "dharl;": "\u21c3", + "dharr;": "\u21c2", + "diam;": "\u22c4", + "diamond;": "\u22c4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": "\xa8", + "digamma;": "\u03dd", + "disin;": "\u22f2", + "div;": "\xf7", + "divide": "\xf7", + "divide;": "\xf7", + "divideontimes;": "\u22c7", + "divonx;": "\u22c7", + "djcy;": "\u0452", + "dlcorn;": "\u231e", + "dlcrop;": "\u230d", + "dollar;": "$", + "dopf;": "\U0001d555", + "dot;": "\u02d9", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": "\u22a1", + "doublebarwedge;": "\u2306", + "downarrow;": "\u2193", + "downdownarrows;": "\u21ca", + "downharpoonleft;": "\u21c3", + "downharpoonright;": "\u21c2", + "drbkarow;": "\u2910", + "drcorn;": "\u231f", + "drcrop;": "\u230c", + "dscr;": "\U0001d4b9", + "dscy;": "\u0455", + "dsol;": "\u29f6", + "dstrok;": "\u0111", + "dtdot;": "\u22f1", + "dtri;": "\u25bf", + "dtrif;": "\u25be", + "duarr;": "\u21f5", + "duhar;": "\u296f", + "dwangle;": "\u29a6", + "dzcy;": "\u045f", + "dzigrarr;": "\u27ff", + "eDDot;": "\u2a77", + "eDot;": "\u2251", + "eacute": "\xe9", + "eacute;": "\xe9", + "easter;": "\u2a6e", + "ecaron;": "\u011b", + "ecir;": "\u2256", + "ecirc": "\xea", + "ecirc;": "\xea", + "ecolon;": "\u2255", + "ecy;": "\u044d", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "efr;": "\U0001d522", + "eg;": "\u2a9a", + "egrave": "\xe8", + "egrave;": "\xe8", + "egs;": "\u2a96", + "egsdot;": "\u2a98", + "el;": "\u2a99", + "elinters;": "\u23e7", + "ell;": "\u2113", + "els;": "\u2a95", + "elsdot;": "\u2a97", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "emptyv;": "\u2205", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "emsp;": "\u2003", + "eng;": "\u014b", + "ensp;": "\u2002", + "eogon;": "\u0119", + "eopf;": "\U0001d556", + "epar;": "\u22d5", + "eparsl;": "\u29e3", + "eplus;": "\u2a71", + "epsi;": "\u03b5", + "epsilon;": "\u03b5", + "epsiv;": "\u03f5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2a96", + "eqslantless;": "\u2a95", + "equals;": "=", + "equest;": "\u225f", + "equiv;": "\u2261", + "equivDD;": "\u2a78", + "eqvparsl;": "\u29e5", + "erDot;": "\u2253", + "erarr;": "\u2971", + "escr;": "\u212f", + "esdot;": "\u2250", + "esim;": "\u2242", + "eta;": "\u03b7", + "eth": "\xf0", + "eth;": "\xf0", + "euml": "\xeb", + "euml;": "\xeb", + "euro;": "\u20ac", + "excl;": "!", + "exist;": "\u2203", + "expectation;": "\u2130", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + "fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\ufb03", + "fflig;": "\ufb00", + "ffllig;": "\ufb04", + "ffr;": "\U0001d523", + "filig;": "\ufb01", + "fjlig;": "fj", + "flat;": "\u266d", + "fllig;": "\ufb02", + "fltns;": "\u25b1", + "fnof;": "\u0192", + "fopf;": "\U0001d557", + "forall;": "\u2200", + "fork;": "\u22d4", + "forkv;": "\u2ad9", + "fpartint;": "\u2a0d", + "frac12": "\xbd", + "frac12;": "\xbd", + "frac13;": "\u2153", + "frac14": "\xbc", + "frac14;": "\xbc", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215b", + "frac23;": "\u2154", + "frac25;": "\u2156", + 
"frac34": "\xbe", + "frac34;": "\xbe", + "frac35;": "\u2157", + "frac38;": "\u215c", + "frac45;": "\u2158", + "frac56;": "\u215a", + "frac58;": "\u215d", + "frac78;": "\u215e", + "frasl;": "\u2044", + "frown;": "\u2322", + "fscr;": "\U0001d4bb", + "gE;": "\u2267", + "gEl;": "\u2a8c", + "gacute;": "\u01f5", + "gamma;": "\u03b3", + "gammad;": "\u03dd", + "gap;": "\u2a86", + "gbreve;": "\u011f", + "gcirc;": "\u011d", + "gcy;": "\u0433", + "gdot;": "\u0121", + "ge;": "\u2265", + "gel;": "\u22db", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": "\u2a7e", + "ges;": "\u2a7e", + "gescc;": "\u2aa9", + "gesdot;": "\u2a80", + "gesdoto;": "\u2a82", + "gesdotol;": "\u2a84", + "gesl;": "\u22db\ufe00", + "gesles;": "\u2a94", + "gfr;": "\U0001d524", + "gg;": "\u226b", + "ggg;": "\u22d9", + "gimel;": "\u2137", + "gjcy;": "\u0453", + "gl;": "\u2277", + "glE;": "\u2a92", + "gla;": "\u2aa5", + "glj;": "\u2aa4", + "gnE;": "\u2269", + "gnap;": "\u2a8a", + "gnapprox;": "\u2a8a", + "gne;": "\u2a88", + "gneq;": "\u2a88", + "gneqq;": "\u2269", + "gnsim;": "\u22e7", + "gopf;": "\U0001d558", + "grave;": "`", + "gscr;": "\u210a", + "gsim;": "\u2273", + "gsime;": "\u2a8e", + "gsiml;": "\u2a90", + "gt": ">", + "gt;": ">", + "gtcc;": "\u2aa7", + "gtcir;": "\u2a7a", + "gtdot;": "\u22d7", + "gtlPar;": "\u2995", + "gtquest;": "\u2a7c", + "gtrapprox;": "\u2a86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22d7", + "gtreqless;": "\u22db", + "gtreqqless;": "\u2a8c", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\ufe00", + "gvnE;": "\u2269\ufe00", + "hArr;": "\u21d4", + "hairsp;": "\u200a", + "half;": "\xbd", + "hamilt;": "\u210b", + "hardcy;": "\u044a", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21ad", + "hbar;": "\u210f", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22b9", + "hfr;": "\U0001d525", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21ff", + "homtht;": "\u223b", + "hookleftarrow;": "\u21a9", + "hookrightarrow;": "\u21aa", + "hopf;": "\U0001d559", + "horbar;": "\u2015", + "hscr;": "\U0001d4bd", + "hslash;": "\u210f", + "hstrok;": "\u0127", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "iacute": "\xed", + "iacute;": "\xed", + "ic;": "\u2063", + "icirc": "\xee", + "icirc;": "\xee", + "icy;": "\u0438", + "iecy;": "\u0435", + "iexcl": "\xa1", + "iexcl;": "\xa1", + "iff;": "\u21d4", + "ifr;": "\U0001d526", + "igrave": "\xec", + "igrave;": "\xec", + "ii;": "\u2148", + "iiiint;": "\u2a0c", + "iiint;": "\u222d", + "iinfin;": "\u29dc", + "iiota;": "\u2129", + "ijlig;": "\u0133", + "imacr;": "\u012b", + "image;": "\u2111", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22b7", + "imped;": "\u01b5", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221e", + "infintie;": "\u29dd", + "inodot;": "\u0131", + "int;": "\u222b", + "intcal;": "\u22ba", + "integers;": "\u2124", + "intercal;": "\u22ba", + "intlarhk;": "\u2a17", + "intprod;": "\u2a3c", + "iocy;": "\u0451", + "iogon;": "\u012f", + "iopf;": "\U0001d55a", + "iota;": "\u03b9", + "iprod;": "\u2a3c", + "iquest": "\xbf", + "iquest;": "\xbf", + "iscr;": "\U0001d4be", + "isin;": "\u2208", + "isinE;": "\u22f9", + "isindot;": "\u22f5", + "isins;": "\u22f4", + "isinsv;": "\u22f3", + "isinv;": "\u2208", + "it;": "\u2062", + "itilde;": "\u0129", + "iukcy;": "\u0456", + "iuml": "\xef", + "iuml;": "\xef", + "jcirc;": "\u0135", + "jcy;": "\u0439", + "jfr;": "\U0001d527", + "jmath;": "\u0237", + "jopf;": "\U0001d55b", 
+ "jscr;": "\U0001d4bf", + "jsercy;": "\u0458", + "jukcy;": "\u0454", + "kappa;": "\u03ba", + "kappav;": "\u03f0", + "kcedil;": "\u0137", + "kcy;": "\u043a", + "kfr;": "\U0001d528", + "kgreen;": "\u0138", + "khcy;": "\u0445", + "kjcy;": "\u045c", + "kopf;": "\U0001d55c", + "kscr;": "\U0001d4c0", + "lAarr;": "\u21da", + "lArr;": "\u21d0", + "lAtail;": "\u291b", + "lBarr;": "\u290e", + "lE;": "\u2266", + "lEg;": "\u2a8b", + "lHar;": "\u2962", + "lacute;": "\u013a", + "laemptyv;": "\u29b4", + "lagran;": "\u2112", + "lambda;": "\u03bb", + "lang;": "\u27e8", + "langd;": "\u2991", + "langle;": "\u27e8", + "lap;": "\u2a85", + "laquo": "\xab", + "laquo;": "\xab", + "larr;": "\u2190", + "larrb;": "\u21e4", + "larrbfs;": "\u291f", + "larrfs;": "\u291d", + "larrhk;": "\u21a9", + "larrlp;": "\u21ab", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21a2", + "lat;": "\u2aab", + "latail;": "\u2919", + "late;": "\u2aad", + "lates;": "\u2aad\ufe00", + "lbarr;": "\u290c", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298b", + "lbrksld;": "\u298f", + "lbrkslu;": "\u298d", + "lcaron;": "\u013e", + "lcedil;": "\u013c", + "lceil;": "\u2308", + "lcub;": "{", + "lcy;": "\u043b", + "ldca;": "\u2936", + "ldquo;": "\u201c", + "ldquor;": "\u201e", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294b", + "ldsh;": "\u21b2", + "le;": "\u2264", + "leftarrow;": "\u2190", + "leftarrowtail;": "\u21a2", + "leftharpoondown;": "\u21bd", + "leftharpoonup;": "\u21bc", + "leftleftarrows;": "\u21c7", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21c6", + "leftrightharpoons;": "\u21cb", + "leftrightsquigarrow;": "\u21ad", + "leftthreetimes;": "\u22cb", + "leg;": "\u22da", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2a7d", + "les;": "\u2a7d", + "lescc;": "\u2aa8", + "lesdot;": "\u2a7f", + "lesdoto;": "\u2a81", + "lesdotor;": "\u2a83", + "lesg;": "\u22da\ufe00", + "lesges;": "\u2a93", + "lessapprox;": "\u2a85", + "lessdot;": "\u22d6", + "lesseqgtr;": "\u22da", + "lesseqqgtr;": "\u2a8b", + "lessgtr;": "\u2276", + "lesssim;": "\u2272", + "lfisht;": "\u297c", + "lfloor;": "\u230a", + "lfr;": "\U0001d529", + "lg;": "\u2276", + "lgE;": "\u2a91", + "lhard;": "\u21bd", + "lharu;": "\u21bc", + "lharul;": "\u296a", + "lhblk;": "\u2584", + "ljcy;": "\u0459", + "ll;": "\u226a", + "llarr;": "\u21c7", + "llcorner;": "\u231e", + "llhard;": "\u296b", + "lltri;": "\u25fa", + "lmidot;": "\u0140", + "lmoust;": "\u23b0", + "lmoustache;": "\u23b0", + "lnE;": "\u2268", + "lnap;": "\u2a89", + "lnapprox;": "\u2a89", + "lne;": "\u2a87", + "lneq;": "\u2a87", + "lneqq;": "\u2268", + "lnsim;": "\u22e6", + "loang;": "\u27ec", + "loarr;": "\u21fd", + "lobrk;": "\u27e6", + "longleftarrow;": "\u27f5", + "longleftrightarrow;": "\u27f7", + "longmapsto;": "\u27fc", + "longrightarrow;": "\u27f6", + "looparrowleft;": "\u21ab", + "looparrowright;": "\u21ac", + "lopar;": "\u2985", + "lopf;": "\U0001d55d", + "loplus;": "\u2a2d", + "lotimes;": "\u2a34", + "lowast;": "\u2217", + "lowbar;": "_", + "loz;": "\u25ca", + "lozenge;": "\u25ca", + "lozf;": "\u29eb", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21c6", + "lrcorner;": "\u231f", + "lrhar;": "\u21cb", + "lrhard;": "\u296d", + "lrm;": "\u200e", + "lrtri;": "\u22bf", + "lsaquo;": "\u2039", + "lscr;": "\U0001d4c1", + "lsh;": "\u21b0", + "lsim;": "\u2272", + "lsime;": "\u2a8d", + "lsimg;": "\u2a8f", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201a", + "lstrok;": "\u0142", + "lt": "<", + "lt;": "<", + "ltcc;": "\u2aa6", + "ltcir;": "\u2a79", + 
"ltdot;": "\u22d6", + "lthree;": "\u22cb", + "ltimes;": "\u22c9", + "ltlarr;": "\u2976", + "ltquest;": "\u2a7b", + "ltrPar;": "\u2996", + "ltri;": "\u25c3", + "ltrie;": "\u22b4", + "ltrif;": "\u25c2", + "lurdshar;": "\u294a", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\ufe00", + "lvnE;": "\u2268\ufe00", + "mDDot;": "\u223a", + "macr": "\xaf", + "macr;": "\xaf", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "map;": "\u21a6", + "mapsto;": "\u21a6", + "mapstodown;": "\u21a7", + "mapstoleft;": "\u21a4", + "mapstoup;": "\u21a5", + "marker;": "\u25ae", + "mcomma;": "\u2a29", + "mcy;": "\u043c", + "mdash;": "\u2014", + "measuredangle;": "\u2221", + "mfr;": "\U0001d52a", + "mho;": "\u2127", + "micro": "\xb5", + "micro;": "\xb5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2af0", + "middot": "\xb7", + "middot;": "\xb7", + "minus;": "\u2212", + "minusb;": "\u229f", + "minusd;": "\u2238", + "minusdu;": "\u2a2a", + "mlcp;": "\u2adb", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22a7", + "mopf;": "\U0001d55e", + "mp;": "\u2213", + "mscr;": "\U0001d4c2", + "mstpos;": "\u223e", + "mu;": "\u03bc", + "multimap;": "\u22b8", + "mumap;": "\u22b8", + "nGg;": "\u22d9\u0338", + "nGt;": "\u226b\u20d2", + "nGtv;": "\u226b\u0338", + "nLeftarrow;": "\u21cd", + "nLeftrightarrow;": "\u21ce", + "nLl;": "\u22d8\u0338", + "nLt;": "\u226a\u20d2", + "nLtv;": "\u226a\u0338", + "nRightarrow;": "\u21cf", + "nVDash;": "\u22af", + "nVdash;": "\u22ae", + "nabla;": "\u2207", + "nacute;": "\u0144", + "nang;": "\u2220\u20d2", + "nap;": "\u2249", + "napE;": "\u2a70\u0338", + "napid;": "\u224b\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266e", + "natural;": "\u266e", + "naturals;": "\u2115", + "nbsp": "\xa0", + "nbsp;": "\xa0", + "nbump;": "\u224e\u0338", + "nbumpe;": "\u224f\u0338", + "ncap;": "\u2a43", + "ncaron;": "\u0148", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2a6d\u0338", + "ncup;": "\u2a42", + "ncy;": "\u043d", + "ndash;": "\u2013", + "ne;": "\u2260", + "neArr;": "\u21d7", + "nearhk;": "\u2924", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "nexist;": "\u2204", + "nexists;": "\u2204", + "nfr;": "\U0001d52b", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2a7e\u0338", + "nges;": "\u2a7e\u0338", + "ngsim;": "\u2275", + "ngt;": "\u226f", + "ngtr;": "\u226f", + "nhArr;": "\u21ce", + "nharr;": "\u21ae", + "nhpar;": "\u2af2", + "ni;": "\u220b", + "nis;": "\u22fc", + "nisd;": "\u22fa", + "niv;": "\u220b", + "njcy;": "\u045a", + "nlArr;": "\u21cd", + "nlE;": "\u2266\u0338", + "nlarr;": "\u219a", + "nldr;": "\u2025", + "nle;": "\u2270", + "nleftarrow;": "\u219a", + "nleftrightarrow;": "\u21ae", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2a7d\u0338", + "nles;": "\u2a7d\u0338", + "nless;": "\u226e", + "nlsim;": "\u2274", + "nlt;": "\u226e", + "nltri;": "\u22ea", + "nltrie;": "\u22ec", + "nmid;": "\u2224", + "nopf;": "\U0001d55f", + "not": "\xac", + "not;": "\xac", + "notin;": "\u2209", + "notinE;": "\u22f9\u0338", + "notindot;": "\u22f5\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22f7", + "notinvc;": "\u22f6", + "notni;": "\u220c", + "notniva;": "\u220c", + "notnivb;": "\u22fe", + "notnivc;": "\u22fd", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2afd\u20e5", + "npart;": "\u2202\u0338", + "npolint;": "\u2a14", + "npr;": 
"\u2280", + "nprcue;": "\u22e0", + "npre;": "\u2aaf\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2aaf\u0338", + "nrArr;": "\u21cf", + "nrarr;": "\u219b", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219d\u0338", + "nrightarrow;": "\u219b", + "nrtri;": "\u22eb", + "nrtrie;": "\u22ed", + "nsc;": "\u2281", + "nsccue;": "\u22e1", + "nsce;": "\u2ab0\u0338", + "nscr;": "\U0001d4c3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22e2", + "nsqsupe;": "\u22e3", + "nsub;": "\u2284", + "nsubE;": "\u2ac5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20d2", + "nsubseteq;": "\u2288", + "nsubseteqq;": "\u2ac5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2ab0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2ac6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20d2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2ac6\u0338", + "ntgl;": "\u2279", + "ntilde": "\xf1", + "ntilde;": "\xf1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22ea", + "ntrianglelefteq;": "\u22ec", + "ntriangleright;": "\u22eb", + "ntrianglerighteq;": "\u22ed", + "nu;": "\u03bd", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvDash;": "\u22ad", + "nvHarr;": "\u2904", + "nvap;": "\u224d\u20d2", + "nvdash;": "\u22ac", + "nvge;": "\u2265\u20d2", + "nvgt;": ">\u20d2", + "nvinfin;": "\u29de", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20d2", + "nvlt;": "<\u20d2", + "nvltrie;": "\u22b4\u20d2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22b5\u20d2", + "nvsim;": "\u223c\u20d2", + "nwArr;": "\u21d6", + "nwarhk;": "\u2923", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "oS;": "\u24c8", + "oacute": "\xf3", + "oacute;": "\xf3", + "oast;": "\u229b", + "ocir;": "\u229a", + "ocirc": "\xf4", + "ocirc;": "\xf4", + "ocy;": "\u043e", + "odash;": "\u229d", + "odblac;": "\u0151", + "odiv;": "\u2a38", + "odot;": "\u2299", + "odsold;": "\u29bc", + "oelig;": "\u0153", + "ofcir;": "\u29bf", + "ofr;": "\U0001d52c", + "ogon;": "\u02db", + "ograve": "\xf2", + "ograve;": "\xf2", + "ogt;": "\u29c1", + "ohbar;": "\u29b5", + "ohm;": "\u03a9", + "oint;": "\u222e", + "olarr;": "\u21ba", + "olcir;": "\u29be", + "olcross;": "\u29bb", + "oline;": "\u203e", + "olt;": "\u29c0", + "omacr;": "\u014d", + "omega;": "\u03c9", + "omicron;": "\u03bf", + "omid;": "\u29b6", + "ominus;": "\u2296", + "oopf;": "\U0001d560", + "opar;": "\u29b7", + "operp;": "\u29b9", + "oplus;": "\u2295", + "or;": "\u2228", + "orarr;": "\u21bb", + "ord;": "\u2a5d", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf": "\xaa", + "ordf;": "\xaa", + "ordm": "\xba", + "ordm;": "\xba", + "origof;": "\u22b6", + "oror;": "\u2a56", + "orslope;": "\u2a57", + "orv;": "\u2a5b", + "oscr;": "\u2134", + "oslash": "\xf8", + "oslash;": "\xf8", + "osol;": "\u2298", + "otilde": "\xf5", + "otilde;": "\xf5", + "otimes;": "\u2297", + "otimesas;": "\u2a36", + "ouml": "\xf6", + "ouml;": "\xf6", + "ovbar;": "\u233d", + "par;": "\u2225", + "para": "\xb6", + "para;": "\xb6", + "parallel;": "\u2225", + "parsim;": "\u2af3", + "parsl;": "\u2afd", + "part;": "\u2202", + "pcy;": "\u043f", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22a5", + "pertenk;": "\u2031", + "pfr;": "\U0001d52d", + "phi;": "\u03c6", + "phiv;": "\u03d5", + "phmmat;": "\u2133", + "phone;": "\u260e", + "pi;": "\u03c0", + "pitchfork;": "\u22d4", + "piv;": "\u03d6", + "planck;": "\u210f", + "planckh;": "\u210e", + "plankv;": "\u210f", + "plus;": "+", + 
"plusacir;": "\u2a23", + "plusb;": "\u229e", + "pluscir;": "\u2a22", + "plusdo;": "\u2214", + "plusdu;": "\u2a25", + "pluse;": "\u2a72", + "plusmn": "\xb1", + "plusmn;": "\xb1", + "plussim;": "\u2a26", + "plustwo;": "\u2a27", + "pm;": "\xb1", + "pointint;": "\u2a15", + "popf;": "\U0001d561", + "pound": "\xa3", + "pound;": "\xa3", + "pr;": "\u227a", + "prE;": "\u2ab3", + "prap;": "\u2ab7", + "prcue;": "\u227c", + "pre;": "\u2aaf", + "prec;": "\u227a", + "precapprox;": "\u2ab7", + "preccurlyeq;": "\u227c", + "preceq;": "\u2aaf", + "precnapprox;": "\u2ab9", + "precneqq;": "\u2ab5", + "precnsim;": "\u22e8", + "precsim;": "\u227e", + "prime;": "\u2032", + "primes;": "\u2119", + "prnE;": "\u2ab5", + "prnap;": "\u2ab9", + "prnsim;": "\u22e8", + "prod;": "\u220f", + "profalar;": "\u232e", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221d", + "propto;": "\u221d", + "prsim;": "\u227e", + "prurel;": "\u22b0", + "pscr;": "\U0001d4c5", + "psi;": "\u03c8", + "puncsp;": "\u2008", + "qfr;": "\U0001d52e", + "qint;": "\u2a0c", + "qopf;": "\U0001d562", + "qprime;": "\u2057", + "qscr;": "\U0001d4c6", + "quaternions;": "\u210d", + "quatint;": "\u2a16", + "quest;": "?", + "questeq;": "\u225f", + "quot": "\"", + "quot;": "\"", + "rAarr;": "\u21db", + "rArr;": "\u21d2", + "rAtail;": "\u291c", + "rBarr;": "\u290f", + "rHar;": "\u2964", + "race;": "\u223d\u0331", + "racute;": "\u0155", + "radic;": "\u221a", + "raemptyv;": "\u29b3", + "rang;": "\u27e9", + "rangd;": "\u2992", + "range;": "\u29a5", + "rangle;": "\u27e9", + "raquo": "\xbb", + "raquo;": "\xbb", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21e5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291e", + "rarrhk;": "\u21aa", + "rarrlp;": "\u21ac", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "rarrtl;": "\u21a3", + "rarrw;": "\u219d", + "ratail;": "\u291a", + "ratio;": "\u2236", + "rationals;": "\u211a", + "rbarr;": "\u290d", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298c", + "rbrksld;": "\u298e", + "rbrkslu;": "\u2990", + "rcaron;": "\u0159", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201d", + "rdquor;": "\u201d", + "rdsh;": "\u21b3", + "real;": "\u211c", + "realine;": "\u211b", + "realpart;": "\u211c", + "reals;": "\u211d", + "rect;": "\u25ad", + "reg": "\xae", + "reg;": "\xae", + "rfisht;": "\u297d", + "rfloor;": "\u230b", + "rfr;": "\U0001d52f", + "rhard;": "\u21c1", + "rharu;": "\u21c0", + "rharul;": "\u296c", + "rho;": "\u03c1", + "rhov;": "\u03f1", + "rightarrow;": "\u2192", + "rightarrowtail;": "\u21a3", + "rightharpoondown;": "\u21c1", + "rightharpoonup;": "\u21c0", + "rightleftarrows;": "\u21c4", + "rightleftharpoons;": "\u21cc", + "rightrightarrows;": "\u21c9", + "rightsquigarrow;": "\u219d", + "rightthreetimes;": "\u22cc", + "ring;": "\u02da", + "risingdotseq;": "\u2253", + "rlarr;": "\u21c4", + "rlhar;": "\u21cc", + "rlm;": "\u200f", + "rmoust;": "\u23b1", + "rmoustache;": "\u23b1", + "rnmid;": "\u2aee", + "roang;": "\u27ed", + "roarr;": "\u21fe", + "robrk;": "\u27e7", + "ropar;": "\u2986", + "ropf;": "\U0001d563", + "roplus;": "\u2a2e", + "rotimes;": "\u2a35", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2a12", + "rrarr;": "\u21c9", + "rsaquo;": "\u203a", + "rscr;": "\U0001d4c7", + "rsh;": "\u21b1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22cc", + "rtimes;": "\u22ca", + "rtri;": "\u25b9", + "rtrie;": "\u22b5", + 
"rtrif;": "\u25b8", + "rtriltri;": "\u29ce", + "ruluhar;": "\u2968", + "rx;": "\u211e", + "sacute;": "\u015b", + "sbquo;": "\u201a", + "sc;": "\u227b", + "scE;": "\u2ab4", + "scap;": "\u2ab8", + "scaron;": "\u0161", + "sccue;": "\u227d", + "sce;": "\u2ab0", + "scedil;": "\u015f", + "scirc;": "\u015d", + "scnE;": "\u2ab6", + "scnap;": "\u2aba", + "scnsim;": "\u22e9", + "scpolint;": "\u2a13", + "scsim;": "\u227f", + "scy;": "\u0441", + "sdot;": "\u22c5", + "sdotb;": "\u22a1", + "sdote;": "\u2a66", + "seArr;": "\u21d8", + "searhk;": "\u2925", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect": "\xa7", + "sect;": "\xa7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "sfr;": "\U0001d530", + "sfrown;": "\u2322", + "sharp;": "\u266f", + "shchcy;": "\u0449", + "shcy;": "\u0448", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "shy": "\xad", + "shy;": "\xad", + "sigma;": "\u03c3", + "sigmaf;": "\u03c2", + "sigmav;": "\u03c2", + "sim;": "\u223c", + "simdot;": "\u2a6a", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2a9e", + "simgE;": "\u2aa0", + "siml;": "\u2a9d", + "simlE;": "\u2a9f", + "simne;": "\u2246", + "simplus;": "\u2a24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "smallsetminus;": "\u2216", + "smashp;": "\u2a33", + "smeparsl;": "\u29e4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2aaa", + "smte;": "\u2aac", + "smtes;": "\u2aac\ufe00", + "softcy;": "\u044c", + "sol;": "/", + "solb;": "\u29c4", + "solbar;": "\u233f", + "sopf;": "\U0001d564", + "spades;": "\u2660", + "spadesuit;": "\u2660", + "spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\ufe00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\ufe00", + "sqsub;": "\u228f", + "sqsube;": "\u2291", + "sqsubset;": "\u228f", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25a1", + "square;": "\u25a1", + "squarf;": "\u25aa", + "squf;": "\u25aa", + "srarr;": "\u2192", + "sscr;": "\U0001d4c8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22c6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03f5", + "straightphi;": "\u03d5", + "strns;": "\xaf", + "sub;": "\u2282", + "subE;": "\u2ac5", + "subdot;": "\u2abd", + "sube;": "\u2286", + "subedot;": "\u2ac3", + "submult;": "\u2ac1", + "subnE;": "\u2acb", + "subne;": "\u228a", + "subplus;": "\u2abf", + "subrarr;": "\u2979", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2ac5", + "subsetneq;": "\u228a", + "subsetneqq;": "\u2acb", + "subsim;": "\u2ac7", + "subsub;": "\u2ad5", + "subsup;": "\u2ad3", + "succ;": "\u227b", + "succapprox;": "\u2ab8", + "succcurlyeq;": "\u227d", + "succeq;": "\u2ab0", + "succnapprox;": "\u2aba", + "succneqq;": "\u2ab6", + "succnsim;": "\u22e9", + "succsim;": "\u227f", + "sum;": "\u2211", + "sung;": "\u266a", + "sup1": "\xb9", + "sup1;": "\xb9", + "sup2": "\xb2", + "sup2;": "\xb2", + "sup3": "\xb3", + "sup3;": "\xb3", + "sup;": "\u2283", + "supE;": "\u2ac6", + "supdot;": "\u2abe", + "supdsub;": "\u2ad8", + "supe;": "\u2287", + "supedot;": "\u2ac4", + "suphsol;": "\u27c9", + "suphsub;": "\u2ad7", + "suplarr;": "\u297b", + "supmult;": "\u2ac2", + "supnE;": "\u2acc", + "supne;": "\u228b", + "supplus;": "\u2ac0", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2ac6", + "supsetneq;": "\u228b", + "supsetneqq;": "\u2acc", + "supsim;": "\u2ac8", + "supsub;": "\u2ad4", + "supsup;": "\u2ad6", + "swArr;": "\u21d9", + 
"swarhk;": "\u2926", + "swarr;": "\u2199", + "swarrow;": "\u2199", + "swnwar;": "\u292a", + "szlig": "\xdf", + "szlig;": "\xdf", + "target;": "\u2316", + "tau;": "\u03c4", + "tbrk;": "\u23b4", + "tcaron;": "\u0165", + "tcedil;": "\u0163", + "tcy;": "\u0442", + "tdot;": "\u20db", + "telrec;": "\u2315", + "tfr;": "\U0001d531", + "there4;": "\u2234", + "therefore;": "\u2234", + "theta;": "\u03b8", + "thetasym;": "\u03d1", + "thetav;": "\u03d1", + "thickapprox;": "\u2248", + "thicksim;": "\u223c", + "thinsp;": "\u2009", + "thkap;": "\u2248", + "thksim;": "\u223c", + "thorn": "\xfe", + "thorn;": "\xfe", + "tilde;": "\u02dc", + "times": "\xd7", + "times;": "\xd7", + "timesb;": "\u22a0", + "timesbar;": "\u2a31", + "timesd;": "\u2a30", + "tint;": "\u222d", + "toea;": "\u2928", + "top;": "\u22a4", + "topbot;": "\u2336", + "topcir;": "\u2af1", + "topf;": "\U0001d565", + "topfork;": "\u2ada", + "tosa;": "\u2929", + "tprime;": "\u2034", + "trade;": "\u2122", + "triangle;": "\u25b5", + "triangledown;": "\u25bf", + "triangleleft;": "\u25c3", + "trianglelefteq;": "\u22b4", + "triangleq;": "\u225c", + "triangleright;": "\u25b9", + "trianglerighteq;": "\u22b5", + "tridot;": "\u25ec", + "trie;": "\u225c", + "triminus;": "\u2a3a", + "triplus;": "\u2a39", + "trisb;": "\u29cd", + "tritime;": "\u2a3b", + "trpezium;": "\u23e2", + "tscr;": "\U0001d4c9", + "tscy;": "\u0446", + "tshcy;": "\u045b", + "tstrok;": "\u0167", + "twixt;": "\u226c", + "twoheadleftarrow;": "\u219e", + "twoheadrightarrow;": "\u21a0", + "uArr;": "\u21d1", + "uHar;": "\u2963", + "uacute": "\xfa", + "uacute;": "\xfa", + "uarr;": "\u2191", + "ubrcy;": "\u045e", + "ubreve;": "\u016d", + "ucirc": "\xfb", + "ucirc;": "\xfb", + "ucy;": "\u0443", + "udarr;": "\u21c5", + "udblac;": "\u0171", + "udhar;": "\u296e", + "ufisht;": "\u297e", + "ufr;": "\U0001d532", + "ugrave": "\xf9", + "ugrave;": "\xf9", + "uharl;": "\u21bf", + "uharr;": "\u21be", + "uhblk;": "\u2580", + "ulcorn;": "\u231c", + "ulcorner;": "\u231c", + "ulcrop;": "\u230f", + "ultri;": "\u25f8", + "umacr;": "\u016b", + "uml": "\xa8", + "uml;": "\xa8", + "uogon;": "\u0173", + "uopf;": "\U0001d566", + "uparrow;": "\u2191", + "updownarrow;": "\u2195", + "upharpoonleft;": "\u21bf", + "upharpoonright;": "\u21be", + "uplus;": "\u228e", + "upsi;": "\u03c5", + "upsih;": "\u03d2", + "upsilon;": "\u03c5", + "upuparrows;": "\u21c8", + "urcorn;": "\u231d", + "urcorner;": "\u231d", + "urcrop;": "\u230e", + "uring;": "\u016f", + "urtri;": "\u25f9", + "uscr;": "\U0001d4ca", + "utdot;": "\u22f0", + "utilde;": "\u0169", + "utri;": "\u25b5", + "utrif;": "\u25b4", + "uuarr;": "\u21c8", + "uuml": "\xfc", + "uuml;": "\xfc", + "uwangle;": "\u29a7", + "vArr;": "\u21d5", + "vBar;": "\u2ae8", + "vBarv;": "\u2ae9", + "vDash;": "\u22a8", + "vangrt;": "\u299c", + "varepsilon;": "\u03f5", + "varkappa;": "\u03f0", + "varnothing;": "\u2205", + "varphi;": "\u03d5", + "varpi;": "\u03d6", + "varpropto;": "\u221d", + "varr;": "\u2195", + "varrho;": "\u03f1", + "varsigma;": "\u03c2", + "varsubsetneq;": "\u228a\ufe00", + "varsubsetneqq;": "\u2acb\ufe00", + "varsupsetneq;": "\u228b\ufe00", + "varsupsetneqq;": "\u2acc\ufe00", + "vartheta;": "\u03d1", + "vartriangleleft;": "\u22b2", + "vartriangleright;": "\u22b3", + "vcy;": "\u0432", + "vdash;": "\u22a2", + "vee;": "\u2228", + "veebar;": "\u22bb", + "veeeq;": "\u225a", + "vellip;": "\u22ee", + "verbar;": "|", + "vert;": "|", + "vfr;": "\U0001d533", + "vltri;": "\u22b2", + "vnsub;": "\u2282\u20d2", + "vnsup;": "\u2283\u20d2", + "vopf;": "\U0001d567", + "vprop;": "\u221d", + 
"vrtri;": "\u22b3", + "vscr;": "\U0001d4cb", + "vsubnE;": "\u2acb\ufe00", + "vsubne;": "\u228a\ufe00", + "vsupnE;": "\u2acc\ufe00", + "vsupne;": "\u228b\ufe00", + "vzigzag;": "\u299a", + "wcirc;": "\u0175", + "wedbar;": "\u2a5f", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "wfr;": "\U0001d534", + "wopf;": "\U0001d568", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "wscr;": "\U0001d4cc", + "xcap;": "\u22c2", + "xcirc;": "\u25ef", + "xcup;": "\u22c3", + "xdtri;": "\u25bd", + "xfr;": "\U0001d535", + "xhArr;": "\u27fa", + "xharr;": "\u27f7", + "xi;": "\u03be", + "xlArr;": "\u27f8", + "xlarr;": "\u27f5", + "xmap;": "\u27fc", + "xnis;": "\u22fb", + "xodot;": "\u2a00", + "xopf;": "\U0001d569", + "xoplus;": "\u2a01", + "xotime;": "\u2a02", + "xrArr;": "\u27f9", + "xrarr;": "\u27f6", + "xscr;": "\U0001d4cd", + "xsqcup;": "\u2a06", + "xuplus;": "\u2a04", + "xutri;": "\u25b3", + "xvee;": "\u22c1", + "xwedge;": "\u22c0", + "yacute": "\xfd", + "yacute;": "\xfd", + "yacy;": "\u044f", + "ycirc;": "\u0177", + "ycy;": "\u044b", + "yen": "\xa5", + "yen;": "\xa5", + "yfr;": "\U0001d536", + "yicy;": "\u0457", + "yopf;": "\U0001d56a", + "yscr;": "\U0001d4ce", + "yucy;": "\u044e", + "yuml": "\xff", + "yuml;": "\xff", + "zacute;": "\u017a", + "zcaron;": "\u017e", + "zcy;": "\u0437", + "zdot;": "\u017c", + "zeetrf;": "\u2128", + "zeta;": "\u03b6", + "zfr;": "\U0001d537", + "zhcy;": "\u0436", + "zigrarr;": "\u21dd", + "zopf;": "\U0001d56b", + "zscr;": "\U0001d4cf", + "zwj;": "\u200d", + "zwnj;": "\u200c", +} + +replacementCharacters = { + 0x0: "\uFFFD", + 0x0d: "\u000D", + 0x80: "\u20AC", + 0x81: "\u0081", + 0x82: "\u201A", + 0x83: "\u0192", + 0x84: "\u201E", + 0x85: "\u2026", + 0x86: "\u2020", + 0x87: "\u2021", + 0x88: "\u02C6", + 0x89: "\u2030", + 0x8A: "\u0160", + 0x8B: "\u2039", + 0x8C: "\u0152", + 0x8D: "\u008D", + 0x8E: "\u017D", + 0x8F: "\u008F", + 0x90: "\u0090", + 0x91: "\u2018", + 0x92: "\u2019", + 0x93: "\u201C", + 0x94: "\u201D", + 0x95: "\u2022", + 0x96: "\u2013", + 0x97: "\u2014", + 0x98: "\u02DC", + 0x99: "\u2122", + 0x9A: "\u0161", + 0x9B: "\u203A", + 0x9C: "\u0153", + 0x9D: "\u009D", + 0x9E: "\u017E", + 0x9F: "\u0178", +} + +tokenTypes = { + "Doctype": 0, + "Characters": 1, + "SpaceCharacters": 2, + "StartTag": 3, + "EndTag": 4, + "EmptyTag": 5, + "Comment": 6, + "ParseError": 7 +} + +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +prefixes = {v: k for k, v in namespaces.items()} +prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +class DataLossWarning(UserWarning): + """Raised when the current tree is unable to represent the input data""" + pass + + +class _ReparseException(Exception): + pass diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__init__.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-310.pyc b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 000000000..219793822 Binary files /dev/null and b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-310.pyc differ diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-310.pyc 
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
new file mode 100644
index 000000000..5ba926e3b
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+from collections import OrderedDict
+
+
+def _attr_key(attr):
+    """Return an appropriate key for an attribute for sorting
+
+    Attributes have a namespace that can be either ``None`` or a string. We
+    can't compare the two because they're different types, so we convert
+    ``None`` to an empty string first.
+ + """ + return (attr[0][0] or ''), attr[0][1] + + +class Filter(base.Filter): + """Alphabetizes attributes for elements""" + def __iter__(self): + for token in base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() + for name, value in sorted(token["data"].items(), + key=_attr_key): + attrs[name] = value + token["data"] = attrs + yield token diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/base.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/base.py new file mode 100644 index 000000000..c7dbaed0f --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/base.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + + +class Filter(object): + def __init__(self, source): + self.source = source + + def __iter__(self): + return iter(self.source) + + def __getattr__(self, name): + return getattr(self.source, name) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 000000000..aefb5c842 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py @@ -0,0 +1,73 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import base + + +class Filter(base.Filter): + """Injects ```` tag into head of document""" + def __init__(self, source, encoding): + """Creates a Filter + + :arg source: the source token stream + + :arg encoding: the encoding to set + + """ + base.Filter.__init__(self, source) + self.encoding = encoding + + def __iter__(self): + state = "pre_head" + meta_found = (self.encoding is None) + pending = [] + + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag": + if token["name"].lower() == "head": + state = "in_head" + + elif type == "EmptyTag": + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False + for (namespace, name), value in token["data"].items(): + if namespace is not None: + continue + elif name.lower() == 'charset': + token["data"][(namespace, name)] = self.encoding + meta_found = True + break + elif name == 'http-equiv' and value.lower() == 'content-type': + has_http_equiv_content_type = True + else: + if has_http_equiv_content_type and (None, "content") in token["data"]: + token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding + meta_found = True + + elif token["name"].lower() == "head" and not meta_found: + # insert meta into empty head + yield {"type": "StartTag", "name": "head", + "data": token["data"]} + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + yield {"type": "EndTag", "name": "head"} + meta_found = True + continue + + elif type == "EndTag": + if token["name"].lower() == "head" and pending: + # insert meta into head (if necessary) and flush pending queue + yield pending.pop(0) + if not meta_found: + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + while pending: + yield pending.pop(0) + meta_found = True + state = "post_head" + + if state == "in_head": + pending.append(token) + else: + yield token diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/lint.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 
index 000000000..fcc07eec5
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/lint.py
@@ -0,0 +1,93 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from pip._vendor.six import text_type
+
+from . import base
+from ..constants import namespaces, voidElements
+
+from ..constants import spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+
+class Filter(base.Filter):
+    """Lints the token stream for errors
+
+    If it finds any errors, it'll raise an ``AssertionError``.
+
+    """
+    def __init__(self, source, require_matching_tags=True):
+        """Creates a Filter
+
+        :arg source: the source token stream
+
+        :arg require_matching_tags: whether or not to require matching tags
+
+        """
+        super(Filter, self).__init__(source)
+        self.require_matching_tags = require_matching_tags
+
+    def __iter__(self):
+        open_elements = []
+        for token in base.Filter.__iter__(self):
+            type = token["type"]
+            if type in ("StartTag", "EmptyTag"):
+                namespace = token["namespace"]
+                name = token["name"]
+                assert namespace is None or isinstance(namespace, text_type)
+                assert namespace != ""
+                assert isinstance(name, text_type)
+                assert name != ""
+                assert isinstance(token["data"], dict)
+                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+                    assert type == "EmptyTag"
+                else:
+                    assert type == "StartTag"
+                if type == "StartTag" and self.require_matching_tags:
+                    open_elements.append((namespace, name))
+                for (namespace, name), value in token["data"].items():
+                    assert namespace is None or isinstance(namespace, text_type)
+                    assert namespace != ""
+                    assert isinstance(name, text_type)
+                    assert name != ""
+                    assert isinstance(value, text_type)
+
+            elif type == "EndTag":
+                namespace = token["namespace"]
+                name = token["name"]
+                assert namespace is None or isinstance(namespace, text_type)
+                assert namespace != ""
+                assert isinstance(name, text_type)
+                assert name != ""
+                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+                    assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
+                elif self.require_matching_tags:
+                    start = open_elements.pop()
+                    assert start == (namespace, name)
+
+            elif type == "Comment":
+                data = token["data"]
+                assert isinstance(data, text_type)
+
+            elif type in ("Characters", "SpaceCharacters"):
+                data = token["data"]
+                assert isinstance(data, text_type)
+                assert data != ""
+                if type == "SpaceCharacters":
+                    assert data.strip(spaceCharacters) == ""
+
+            elif type == "Doctype":
+                name = token["name"]
+                assert name is None or isinstance(name, text_type)
+                assert token["publicId"] is None or isinstance(token["publicId"], text_type)
+                assert token["systemId"] is None or isinstance(token["systemId"], text_type)
+
+            elif type == "Entity":
+                assert isinstance(token["name"], text_type)
+
+            elif type == "SerializerError":
+                assert isinstance(token["data"], text_type)
+
+            else:
+                assert False, "Unknown token type: %(type)s" % {"type": type}
+
+            yield token
diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
new file mode 100644
index 000000000..4a865012c
--- /dev/null
+++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
@@ -0,0 +1,207 @@
+from __future__ import absolute_import, division, unicode_literals
+
import base + + +class Filter(base.Filter): + """Removes optional tags from the token stream""" + def slider(self): + previous1 = previous2 = None + for token in self.source: + if previous1 is not None: + yield previous2, previous1, token + previous2 = previous1 + previous1 = token + if previous1 is not None: + yield previous2, previous1, None + + def __iter__(self): + for previous, token, next in self.slider(): + type = token["type"] + if type == "StartTag": + if (token["data"] or + not self.is_optional_start(token["name"], previous, next)): + yield token + elif type == "EndTag": + if not self.is_optional_end(token["name"], next): + yield token + else: + yield token + + def is_optional_start(self, tagname, previous, next): + type = next and next["type"] or None + if tagname in 'html': + # An html element's start tag may be omitted if the first thing + # inside the html element is not a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname == 'head': + # A head element's start tag may be omitted if the first thing + # inside the head element is an element. + # XXX: we also omit the start tag if the head element is empty + if type in ("StartTag", "EmptyTag"): + return True + elif type == "EndTag": + return next["name"] == "head" + elif tagname == 'body': + # A body element's start tag may be omitted if the first thing + # inside the body element is not a space character or a comment, + # except if the first thing inside the body element is a script + # or style element and the node immediately preceding the body + # element is a head element whose end tag has been omitted. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we do not look at the preceding event, so we never omit + # the body element's start tag if it's followed by a script or + # a style element. + return next["name"] not in ('script', 'style') + else: + return True + elif tagname == 'colgroup': + # A colgroup element's start tag may be omitted if the first thing + # inside the colgroup element is a col element, and if the element + # is not immediately preceded by another colgroup element whose + # end tag has been omitted. + if type in ("StartTag", "EmptyTag"): + # XXX: we do not look at the preceding event, so instead we never + # omit the colgroup element's end tag when it is immediately + # followed by another colgroup element. See is_optional_end. + return next["name"] == "col" + else: + return False + elif tagname == 'tbody': + # A tbody element's start tag may be omitted if the first thing + # inside the tbody element is a tr element, and if the element is + # not immediately preceded by a tbody, thead, or tfoot element + # whose end tag has been omitted. + if type == "StartTag": + # omit the thead and tfoot elements' end tag when they are + # immediately followed by a tbody element. See is_optional_end. + if previous and previous['type'] == 'EndTag' and \ + previous['name'] in ('tbody', 'thead', 'tfoot'): + return False + return next["name"] == 'tr' + else: + return False + return False + + def is_optional_end(self, tagname, next): + type = next and next["type"] or None + if tagname in ('html', 'head', 'body'): + # An html element's end tag may be omitted if the html element + # is not immediately followed by a space character or a comment. 
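The slider() helper above turns a token stream into (previous, token, next) triples so the omission rules can look one token in each direction. A standalone sketch with integers standing in for tokens:

def slider(source):
    previous1 = previous2 = None
    for item in source:
        if previous1 is not None:
            yield previous2, previous1, item
        previous2 = previous1
        previous1 = item
    if previous1 is not None:
        yield previous2, previous1, None

print(list(slider([1, 2, 3])))
# [(None, 1, 2), (1, 2, 3), (2, 3, None)]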
+ return type not in ("Comment", "SpaceCharacters") + elif tagname in ('li', 'optgroup', 'tr'): + # A li element's end tag may be omitted if the li element is + # immediately followed by another li element or if there is + # no more content in the parent element. + # An optgroup element's end tag may be omitted if the optgroup + # element is immediately followed by another optgroup element, + # or if there is no more content in the parent element. + # A tr element's end tag may be omitted if the tr element is + # immediately followed by another tr element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] == tagname + else: + return type == "EndTag" or type is None + elif tagname in ('dt', 'dd'): + # A dt element's end tag may be omitted if the dt element is + # immediately followed by another dt element or a dd element. + # A dd element's end tag may be omitted if the dd element is + # immediately followed by another dd element or a dt element, + # or if there is no more content in the parent element. + if type == "StartTag": + return next["name"] in ('dt', 'dd') + elif tagname == 'dd': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'p': + # A p element's end tag may be omitted if the p element is + # immediately followed by an address, article, aside, + # blockquote, datagrid, dialog, dir, div, dl, fieldset, + # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, + # nav, ol, p, pre, section, table, or ul, element, or if + # there is no more content in the parent element. + if type in ("StartTag", "EmptyTag"): + return next["name"] in ('address', 'article', 'aside', + 'blockquote', 'datagrid', 'dialog', + 'dir', 'div', 'dl', 'fieldset', 'footer', + 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', + 'header', 'hr', 'menu', 'nav', 'ol', + 'p', 'pre', 'section', 'table', 'ul') + else: + return type == "EndTag" or type is None + elif tagname == 'option': + # An option element's end tag may be omitted if the option + # element is immediately followed by another option element, + # or if it is immediately followed by an optgroup + # element, or if there is no more content in the parent + # element. + if type == "StartTag": + return next["name"] in ('option', 'optgroup') + else: + return type == "EndTag" or type is None + elif tagname in ('rt', 'rp'): + # An rt element's end tag may be omitted if the rt element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + # An rp element's end tag may be omitted if the rp element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('rt', 'rp') + else: + return type == "EndTag" or type is None + elif tagname == 'colgroup': + # A colgroup element's end tag may be omitted if the colgroup + # element is not immediately followed by a space character or + # a comment. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we also look for an immediately following colgroup + # element. See is_optional_start. + return next["name"] != 'colgroup' + else: + return True + elif tagname in ('thead', 'tbody'): + # A thead element's end tag may be omitted if the thead element + # is immediately followed by a tbody or tfoot element. 
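In practice these omission rules run inside a serializer chain rather than standalone. A hedged usage sketch, assuming the standalone html5lib package (the vendored copy under pip._vendor behaves identically but is not a public API):

import html5lib

# Parse, then re-serialize with optional tags omitted; the serializer
# applies the optionaltags filter when omit_optional_tags is set.
dom = html5lib.parse("<table><tbody><tr><td>x</td></tr></tbody></table>",
                     treebuilder="dom")
print(html5lib.serialize(dom, tree="dom", omit_optional_tags=True))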
+ # A tbody element's end tag may be omitted if the tbody element + # is immediately followed by a tbody or tfoot element, or if + # there is no more content in the parent element. + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] in ['tbody', 'tfoot'] + elif tagname == 'tbody': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'tfoot': + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] == 'tbody' + else: + return type == "EndTag" or type is None + elif tagname in ('td', 'th'): + # A td element's end tag may be omitted if the td element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + # A th element's end tag may be omitted if the th element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('td', 'th') + else: + return type == "EndTag" or type is None + return False diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 000000000..aa7431d13 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -0,0 +1,916 @@ +"""Deprecated from html5lib 1.1. + +See `here `_ for +information about its deprecation; `Bleach `_ +is recommended as a replacement. Please let us know in the aforementioned issue +if Bleach is unsuitable for your needs. + +""" +from __future__ import absolute_import, division, unicode_literals + +import re +import warnings +from xml.sax.saxutils import escape, unescape + +from pip._vendor.six.moves import urllib_parse as urlparse + +from . 
import base +from ..constants import namespaces, prefixes + +__all__ = ["Filter"] + + +_deprecation_msg = ( + "html5lib's sanitizer is deprecated; see " + + "https://github.com/html5lib/html5lib-python/issues/443 and please let " + + "us know if Bleach is unsuitable for your needs" +) + +warnings.warn(_deprecation_msg, DeprecationWarning) + +allowed_elements = frozenset(( + (namespaces['html'], 'a'), + (namespaces['html'], 'abbr'), + (namespaces['html'], 'acronym'), + (namespaces['html'], 'address'), + (namespaces['html'], 'area'), + (namespaces['html'], 'article'), + (namespaces['html'], 'aside'), + (namespaces['html'], 'audio'), + (namespaces['html'], 'b'), + (namespaces['html'], 'big'), + (namespaces['html'], 'blockquote'), + (namespaces['html'], 'br'), + (namespaces['html'], 'button'), + (namespaces['html'], 'canvas'), + (namespaces['html'], 'caption'), + (namespaces['html'], 'center'), + (namespaces['html'], 'cite'), + (namespaces['html'], 'code'), + (namespaces['html'], 'col'), + (namespaces['html'], 'colgroup'), + (namespaces['html'], 'command'), + (namespaces['html'], 'datagrid'), + (namespaces['html'], 'datalist'), + (namespaces['html'], 'dd'), + (namespaces['html'], 'del'), + (namespaces['html'], 'details'), + (namespaces['html'], 'dfn'), + (namespaces['html'], 'dialog'), + (namespaces['html'], 'dir'), + (namespaces['html'], 'div'), + (namespaces['html'], 'dl'), + (namespaces['html'], 'dt'), + (namespaces['html'], 'em'), + (namespaces['html'], 'event-source'), + (namespaces['html'], 'fieldset'), + (namespaces['html'], 'figcaption'), + (namespaces['html'], 'figure'), + (namespaces['html'], 'footer'), + (namespaces['html'], 'font'), + (namespaces['html'], 'form'), + (namespaces['html'], 'header'), + (namespaces['html'], 'h1'), + (namespaces['html'], 'h2'), + (namespaces['html'], 'h3'), + (namespaces['html'], 'h4'), + (namespaces['html'], 'h5'), + (namespaces['html'], 'h6'), + (namespaces['html'], 'hr'), + (namespaces['html'], 'i'), + (namespaces['html'], 'img'), + (namespaces['html'], 'input'), + (namespaces['html'], 'ins'), + (namespaces['html'], 'keygen'), + (namespaces['html'], 'kbd'), + (namespaces['html'], 'label'), + (namespaces['html'], 'legend'), + (namespaces['html'], 'li'), + (namespaces['html'], 'm'), + (namespaces['html'], 'map'), + (namespaces['html'], 'menu'), + (namespaces['html'], 'meter'), + (namespaces['html'], 'multicol'), + (namespaces['html'], 'nav'), + (namespaces['html'], 'nextid'), + (namespaces['html'], 'ol'), + (namespaces['html'], 'output'), + (namespaces['html'], 'optgroup'), + (namespaces['html'], 'option'), + (namespaces['html'], 'p'), + (namespaces['html'], 'pre'), + (namespaces['html'], 'progress'), + (namespaces['html'], 'q'), + (namespaces['html'], 's'), + (namespaces['html'], 'samp'), + (namespaces['html'], 'section'), + (namespaces['html'], 'select'), + (namespaces['html'], 'small'), + (namespaces['html'], 'sound'), + (namespaces['html'], 'source'), + (namespaces['html'], 'spacer'), + (namespaces['html'], 'span'), + (namespaces['html'], 'strike'), + (namespaces['html'], 'strong'), + (namespaces['html'], 'sub'), + (namespaces['html'], 'sup'), + (namespaces['html'], 'table'), + (namespaces['html'], 'tbody'), + (namespaces['html'], 'td'), + (namespaces['html'], 'textarea'), + (namespaces['html'], 'time'), + (namespaces['html'], 'tfoot'), + (namespaces['html'], 'th'), + (namespaces['html'], 'thead'), + (namespaces['html'], 'tr'), + (namespaces['html'], 'tt'), + (namespaces['html'], 'u'), + (namespaces['html'], 'ul'), + (namespaces['html'], 'var'), + 
(namespaces['html'], 'video'), + (namespaces['mathml'], 'maction'), + (namespaces['mathml'], 'math'), + (namespaces['mathml'], 'merror'), + (namespaces['mathml'], 'mfrac'), + (namespaces['mathml'], 'mi'), + (namespaces['mathml'], 'mmultiscripts'), + (namespaces['mathml'], 'mn'), + (namespaces['mathml'], 'mo'), + (namespaces['mathml'], 'mover'), + (namespaces['mathml'], 'mpadded'), + (namespaces['mathml'], 'mphantom'), + (namespaces['mathml'], 'mprescripts'), + (namespaces['mathml'], 'mroot'), + (namespaces['mathml'], 'mrow'), + (namespaces['mathml'], 'mspace'), + (namespaces['mathml'], 'msqrt'), + (namespaces['mathml'], 'mstyle'), + (namespaces['mathml'], 'msub'), + (namespaces['mathml'], 'msubsup'), + (namespaces['mathml'], 'msup'), + (namespaces['mathml'], 'mtable'), + (namespaces['mathml'], 'mtd'), + (namespaces['mathml'], 'mtext'), + (namespaces['mathml'], 'mtr'), + (namespaces['mathml'], 'munder'), + (namespaces['mathml'], 'munderover'), + (namespaces['mathml'], 'none'), + (namespaces['svg'], 'a'), + (namespaces['svg'], 'animate'), + (namespaces['svg'], 'animateColor'), + (namespaces['svg'], 'animateMotion'), + (namespaces['svg'], 'animateTransform'), + (namespaces['svg'], 'clipPath'), + (namespaces['svg'], 'circle'), + (namespaces['svg'], 'defs'), + (namespaces['svg'], 'desc'), + (namespaces['svg'], 'ellipse'), + (namespaces['svg'], 'font-face'), + (namespaces['svg'], 'font-face-name'), + (namespaces['svg'], 'font-face-src'), + (namespaces['svg'], 'g'), + (namespaces['svg'], 'glyph'), + (namespaces['svg'], 'hkern'), + (namespaces['svg'], 'linearGradient'), + (namespaces['svg'], 'line'), + (namespaces['svg'], 'marker'), + (namespaces['svg'], 'metadata'), + (namespaces['svg'], 'missing-glyph'), + (namespaces['svg'], 'mpath'), + (namespaces['svg'], 'path'), + (namespaces['svg'], 'polygon'), + (namespaces['svg'], 'polyline'), + (namespaces['svg'], 'radialGradient'), + (namespaces['svg'], 'rect'), + (namespaces['svg'], 'set'), + (namespaces['svg'], 'stop'), + (namespaces['svg'], 'svg'), + (namespaces['svg'], 'switch'), + (namespaces['svg'], 'text'), + (namespaces['svg'], 'title'), + (namespaces['svg'], 'tspan'), + (namespaces['svg'], 'use'), +)) + +allowed_attributes = frozenset(( + # HTML attributes + (None, 'abbr'), + (None, 'accept'), + (None, 'accept-charset'), + (None, 'accesskey'), + (None, 'action'), + (None, 'align'), + (None, 'alt'), + (None, 'autocomplete'), + (None, 'autofocus'), + (None, 'axis'), + (None, 'background'), + (None, 'balance'), + (None, 'bgcolor'), + (None, 'bgproperties'), + (None, 'border'), + (None, 'bordercolor'), + (None, 'bordercolordark'), + (None, 'bordercolorlight'), + (None, 'bottompadding'), + (None, 'cellpadding'), + (None, 'cellspacing'), + (None, 'ch'), + (None, 'challenge'), + (None, 'char'), + (None, 'charoff'), + (None, 'choff'), + (None, 'charset'), + (None, 'checked'), + (None, 'cite'), + (None, 'class'), + (None, 'clear'), + (None, 'color'), + (None, 'cols'), + (None, 'colspan'), + (None, 'compact'), + (None, 'contenteditable'), + (None, 'controls'), + (None, 'coords'), + (None, 'data'), + (None, 'datafld'), + (None, 'datapagesize'), + (None, 'datasrc'), + (None, 'datetime'), + (None, 'default'), + (None, 'delay'), + (None, 'dir'), + (None, 'disabled'), + (None, 'draggable'), + (None, 'dynsrc'), + (None, 'enctype'), + (None, 'end'), + (None, 'face'), + (None, 'for'), + (None, 'form'), + (None, 'frame'), + (None, 'galleryimg'), + (None, 'gutter'), + (None, 'headers'), + (None, 'height'), + (None, 'hidefocus'), + (None, 'hidden'), + (None, 
'high'), + (None, 'href'), + (None, 'hreflang'), + (None, 'hspace'), + (None, 'icon'), + (None, 'id'), + (None, 'inputmode'), + (None, 'ismap'), + (None, 'keytype'), + (None, 'label'), + (None, 'leftspacing'), + (None, 'lang'), + (None, 'list'), + (None, 'longdesc'), + (None, 'loop'), + (None, 'loopcount'), + (None, 'loopend'), + (None, 'loopstart'), + (None, 'low'), + (None, 'lowsrc'), + (None, 'max'), + (None, 'maxlength'), + (None, 'media'), + (None, 'method'), + (None, 'min'), + (None, 'multiple'), + (None, 'name'), + (None, 'nohref'), + (None, 'noshade'), + (None, 'nowrap'), + (None, 'open'), + (None, 'optimum'), + (None, 'pattern'), + (None, 'ping'), + (None, 'point-size'), + (None, 'poster'), + (None, 'pqg'), + (None, 'preload'), + (None, 'prompt'), + (None, 'radiogroup'), + (None, 'readonly'), + (None, 'rel'), + (None, 'repeat-max'), + (None, 'repeat-min'), + (None, 'replace'), + (None, 'required'), + (None, 'rev'), + (None, 'rightspacing'), + (None, 'rows'), + (None, 'rowspan'), + (None, 'rules'), + (None, 'scope'), + (None, 'selected'), + (None, 'shape'), + (None, 'size'), + (None, 'span'), + (None, 'src'), + (None, 'start'), + (None, 'step'), + (None, 'style'), + (None, 'summary'), + (None, 'suppress'), + (None, 'tabindex'), + (None, 'target'), + (None, 'template'), + (None, 'title'), + (None, 'toppadding'), + (None, 'type'), + (None, 'unselectable'), + (None, 'usemap'), + (None, 'urn'), + (None, 'valign'), + (None, 'value'), + (None, 'variable'), + (None, 'volume'), + (None, 'vspace'), + (None, 'vrml'), + (None, 'width'), + (None, 'wrap'), + (namespaces['xml'], 'lang'), + # MathML attributes + (None, 'actiontype'), + (None, 'align'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnalign'), + (None, 'columnlines'), + (None, 'columnspacing'), + (None, 'columnspan'), + (None, 'depth'), + (None, 'display'), + (None, 'displaystyle'), + (None, 'equalcolumns'), + (None, 'equalrows'), + (None, 'fence'), + (None, 'fontstyle'), + (None, 'fontweight'), + (None, 'frame'), + (None, 'height'), + (None, 'linethickness'), + (None, 'lspace'), + (None, 'mathbackground'), + (None, 'mathcolor'), + (None, 'mathvariant'), + (None, 'mathvariant'), + (None, 'maxsize'), + (None, 'minsize'), + (None, 'other'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowalign'), + (None, 'rowlines'), + (None, 'rowspacing'), + (None, 'rowspan'), + (None, 'rspace'), + (None, 'scriptlevel'), + (None, 'selection'), + (None, 'separator'), + (None, 'stretchy'), + (None, 'width'), + (None, 'width'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'type'), + # SVG attributes + (None, 'accent-height'), + (None, 'accumulate'), + (None, 'additive'), + (None, 'alphabetic'), + (None, 'arabic-form'), + (None, 'ascent'), + (None, 'attributeName'), + (None, 'attributeType'), + (None, 'baseProfile'), + (None, 'bbox'), + (None, 'begin'), + (None, 'by'), + (None, 'calcMode'), + (None, 'cap-height'), + (None, 'class'), + (None, 'clip-path'), + (None, 'color'), + (None, 'color-rendering'), + (None, 'content'), + (None, 'cx'), + (None, 'cy'), + (None, 'd'), + (None, 'dx'), + (None, 'dy'), + (None, 'descent'), + (None, 'display'), + (None, 'dur'), + (None, 'end'), + (None, 'fill'), + (None, 'fill-opacity'), + (None, 'fill-rule'), + (None, 'font-family'), + (None, 'font-size'), + (None, 'font-stretch'), + (None, 'font-style'), + (None, 'font-variant'), + (None, 'font-weight'), + (None, 'from'), + (None, 'fx'), + (None, 'fy'), + (None, 'g1'), + (None, 'g2'), + (None, 
'glyph-name'), + (None, 'gradientUnits'), + (None, 'hanging'), + (None, 'height'), + (None, 'horiz-adv-x'), + (None, 'horiz-origin-x'), + (None, 'id'), + (None, 'ideographic'), + (None, 'k'), + (None, 'keyPoints'), + (None, 'keySplines'), + (None, 'keyTimes'), + (None, 'lang'), + (None, 'marker-end'), + (None, 'marker-mid'), + (None, 'marker-start'), + (None, 'markerHeight'), + (None, 'markerUnits'), + (None, 'markerWidth'), + (None, 'mathematical'), + (None, 'max'), + (None, 'min'), + (None, 'name'), + (None, 'offset'), + (None, 'opacity'), + (None, 'orient'), + (None, 'origin'), + (None, 'overline-position'), + (None, 'overline-thickness'), + (None, 'panose-1'), + (None, 'path'), + (None, 'pathLength'), + (None, 'points'), + (None, 'preserveAspectRatio'), + (None, 'r'), + (None, 'refX'), + (None, 'refY'), + (None, 'repeatCount'), + (None, 'repeatDur'), + (None, 'requiredExtensions'), + (None, 'requiredFeatures'), + (None, 'restart'), + (None, 'rotate'), + (None, 'rx'), + (None, 'ry'), + (None, 'slope'), + (None, 'stemh'), + (None, 'stemv'), + (None, 'stop-color'), + (None, 'stop-opacity'), + (None, 'strikethrough-position'), + (None, 'strikethrough-thickness'), + (None, 'stroke'), + (None, 'stroke-dasharray'), + (None, 'stroke-dashoffset'), + (None, 'stroke-linecap'), + (None, 'stroke-linejoin'), + (None, 'stroke-miterlimit'), + (None, 'stroke-opacity'), + (None, 'stroke-width'), + (None, 'systemLanguage'), + (None, 'target'), + (None, 'text-anchor'), + (None, 'to'), + (None, 'transform'), + (None, 'type'), + (None, 'u1'), + (None, 'u2'), + (None, 'underline-position'), + (None, 'underline-thickness'), + (None, 'unicode'), + (None, 'unicode-range'), + (None, 'units-per-em'), + (None, 'values'), + (None, 'version'), + (None, 'viewBox'), + (None, 'visibility'), + (None, 'width'), + (None, 'widths'), + (None, 'x'), + (None, 'x-height'), + (None, 'x1'), + (None, 'x2'), + (namespaces['xlink'], 'actuate'), + (namespaces['xlink'], 'arcrole'), + (namespaces['xlink'], 'href'), + (namespaces['xlink'], 'role'), + (namespaces['xlink'], 'show'), + (namespaces['xlink'], 'title'), + (namespaces['xlink'], 'type'), + (namespaces['xml'], 'base'), + (namespaces['xml'], 'lang'), + (namespaces['xml'], 'space'), + (None, 'y'), + (None, 'y1'), + (None, 'y2'), + (None, 'zoomAndPan'), +)) + +attr_val_is_uri = frozenset(( + (None, 'href'), + (None, 'src'), + (None, 'cite'), + (None, 'action'), + (None, 'longdesc'), + (None, 'poster'), + (None, 'background'), + (None, 'datasrc'), + (None, 'dynsrc'), + (None, 'lowsrc'), + (None, 'ping'), + (namespaces['xlink'], 'href'), + (namespaces['xml'], 'base'), +)) + +svg_attr_val_allows_ref = frozenset(( + (None, 'clip-path'), + (None, 'color-profile'), + (None, 'cursor'), + (None, 'fill'), + (None, 'filter'), + (None, 'marker'), + (None, 'marker-start'), + (None, 'marker-mid'), + (None, 'marker-end'), + (None, 'mask'), + (None, 'stroke'), +)) + +svg_allow_local_href = frozenset(( + (None, 'altGlyph'), + (None, 'animate'), + (None, 'animateColor'), + (None, 'animateMotion'), + (None, 'animateTransform'), + (None, 'cursor'), + (None, 'feImage'), + (None, 'filter'), + (None, 'linearGradient'), + (None, 'pattern'), + (None, 'radialGradient'), + (None, 'textpath'), + (None, 'tref'), + (None, 'set'), + (None, 'use') +)) + +allowed_css_properties = frozenset(( + 'azimuth', + 'background-color', + 'border-bottom-color', + 'border-collapse', + 'border-color', + 'border-left-color', + 'border-right-color', + 'border-top-color', + 'clear', + 'color', + 'cursor', + 'direction', + 
'display', + 'elevation', + 'float', + 'font', + 'font-family', + 'font-size', + 'font-style', + 'font-variant', + 'font-weight', + 'height', + 'letter-spacing', + 'line-height', + 'overflow', + 'pause', + 'pause-after', + 'pause-before', + 'pitch', + 'pitch-range', + 'richness', + 'speak', + 'speak-header', + 'speak-numeral', + 'speak-punctuation', + 'speech-rate', + 'stress', + 'text-align', + 'text-decoration', + 'text-indent', + 'unicode-bidi', + 'vertical-align', + 'voice-family', + 'volume', + 'white-space', + 'width', +)) + +allowed_css_keywords = frozenset(( + 'auto', + 'aqua', + 'black', + 'block', + 'blue', + 'bold', + 'both', + 'bottom', + 'brown', + 'center', + 'collapse', + 'dashed', + 'dotted', + 'fuchsia', + 'gray', + 'green', + '!important', + 'italic', + 'left', + 'lime', + 'maroon', + 'medium', + 'none', + 'navy', + 'normal', + 'nowrap', + 'olive', + 'pointer', + 'purple', + 'red', + 'right', + 'solid', + 'silver', + 'teal', + 'top', + 'transparent', + 'underline', + 'white', + 'yellow', +)) + +allowed_svg_properties = frozenset(( + 'fill', + 'fill-opacity', + 'fill-rule', + 'stroke', + 'stroke-width', + 'stroke-linecap', + 'stroke-linejoin', + 'stroke-opacity', +)) + +allowed_protocols = frozenset(( + 'ed2k', + 'ftp', + 'http', + 'https', + 'irc', + 'mailto', + 'news', + 'gopher', + 'nntp', + 'telnet', + 'webcal', + 'xmpp', + 'callto', + 'feed', + 'urn', + 'aim', + 'rsync', + 'tag', + 'ssh', + 'sftp', + 'rtsp', + 'afs', + 'data', +)) + +allowed_content_types = frozenset(( + 'image/png', + 'image/jpeg', + 'image/gif', + 'image/webp', + 'image/bmp', + 'text/plain', +)) + + +data_content_type = re.compile(r''' + ^ + # Match a content type / + (?P[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) + # Match any character set and encoding + (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) + |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) + # Assume the rest is data + ,.* + $ + ''', + re.VERBOSE) + + +class Filter(base.Filter): + """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes""" + def __init__(self, + source, + allowed_elements=allowed_elements, + allowed_attributes=allowed_attributes, + allowed_css_properties=allowed_css_properties, + allowed_css_keywords=allowed_css_keywords, + allowed_svg_properties=allowed_svg_properties, + allowed_protocols=allowed_protocols, + allowed_content_types=allowed_content_types, + attr_val_is_uri=attr_val_is_uri, + svg_attr_val_allows_ref=svg_attr_val_allows_ref, + svg_allow_local_href=svg_allow_local_href): + """Creates a Filter + + :arg allowed_elements: set of elements to allow--everything else will + be escaped + + :arg allowed_attributes: set of attributes to allow in + elements--everything else will be stripped + + :arg allowed_css_properties: set of CSS properties to allow--everything + else will be stripped + + :arg allowed_css_keywords: set of CSS keywords to allow--everything + else will be stripped + + :arg allowed_svg_properties: set of SVG properties to allow--everything + else will be removed + + :arg allowed_protocols: set of allowed protocols for URIs + + :arg allowed_content_types: set of allowed content types for ``data`` URIs. 
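All of the allowlists above key elements and attributes by (namespace, local-name) pairs. A small sketch of the membership convention sanitize_token() applies, where an unnamespaced element falls back to the HTML namespace:

html_ns = "http://www.w3.org/1999/xhtml"  # namespaces["html"] in the constants module
allowed_elements = frozenset([(html_ns, "a"), (html_ns, "b")])

def is_allowed(namespace, name):
    if (namespace, name) in allowed_elements:
        return True
    # Tokens with no namespace are treated as HTML, as in sanitize_token().
    return namespace is None and (html_ns, name) in allowed_elements

print(is_allowed(None, "a"), is_allowed(html_ns, "script"))  # True False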
+ + :arg attr_val_is_uri: set of attributes that have URI values--values + that have a scheme not listed in ``allowed_protocols`` are removed + + :arg svg_attr_val_allows_ref: set of SVG attributes that can have + references + + :arg svg_allow_local_href: set of SVG elements that can have local + hrefs--these are removed + + """ + super(Filter, self).__init__(source) + + warnings.warn(_deprecation_msg, DeprecationWarning) + + self.allowed_elements = allowed_elements + self.allowed_attributes = allowed_attributes + self.allowed_css_properties = allowed_css_properties + self.allowed_css_keywords = allowed_css_keywords + self.allowed_svg_properties = allowed_svg_properties + self.allowed_protocols = allowed_protocols + self.allowed_content_types = allowed_content_types + self.attr_val_is_uri = attr_val_is_uri + self.svg_attr_val_allows_ref = svg_attr_val_allows_ref + self.svg_allow_local_href = svg_allow_local_href + + def __iter__(self): + for token in base.Filter.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token + + # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and + # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes + # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and + # ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI + # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are + # allowed. + # + # sanitize_html('') + # => <script> do_nasty_stuff() </script> + # sanitize_html('Click here for $100') + # => Click here for $100 + def sanitize_token(self, token): + + # accommodate filters which use token_type differently + token_type = token["type"] + if token_type in ("StartTag", "EndTag", "EmptyTag"): + name = token["name"] + namespace = token["namespace"] + if ((namespace, name) in self.allowed_elements or + (namespace is None and + (namespaces["html"], name) in self.allowed_elements)): + return self.allowed_token(token) + else: + return self.disallowed_token(token) + elif token_type == "Comment": + pass + else: + return token + + def allowed_token(self, token): + if "data" in token: + attrs = token["data"] + attr_names = set(attrs.keys()) + + # Remove forbidden attributes + for to_remove in (attr_names - self.allowed_attributes): + del token["data"][to_remove] + attr_names.remove(to_remove) + + # Remove attributes with disallowed URL values + for attr in (attr_names & self.attr_val_is_uri): + assert attr in attrs + # I don't have a clue where this regexp comes from or why it matches those + # characters, nor why we call unescape. I just know it's always been here. + # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all + # this will do is remove *more* than it otherwise would. 
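A condensed sketch of the URI check implemented below, with the HTML-entity unescape step omitted: strip backticks, control characters, and whitespace, drop replacement characters, then require any scheme present to be on the allowlist.

import re
from urllib.parse import urlparse

allowed_protocols = frozenset(["http", "https", "mailto"])

def uri_is_allowed(value):
    cleaned = re.sub(r"[`\x00-\x20\x7f-\xa0\s]+", "", value.lower())
    cleaned = cleaned.replace("\ufffd", "")
    try:
        uri = urlparse(cleaned)
    except ValueError:
        return False
    return not uri.scheme or uri.scheme in allowed_protocols

print(uri_is_allowed("  java\nscript:alert(1)"))  # False: the scheme survives stripping
print(uri_is_allowed("https://example.com/"))     # True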
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', + unescape(attrs[attr])).lower() + # remove replacement characters from unescaped characters + val_unescaped = val_unescaped.replace("\ufffd", "") + try: + uri = urlparse.urlparse(val_unescaped) + except ValueError: + uri = None + del attrs[attr] + if uri and uri.scheme: + if uri.scheme not in self.allowed_protocols: + del attrs[attr] + if uri.scheme == 'data': + m = data_content_type.match(uri.path) + if not m: + del attrs[attr] + elif m.group('content_type') not in self.allowed_content_types: + del attrs[attr] + + for attr in self.svg_attr_val_allows_ref: + if attr in attrs: + attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', + ' ', + unescape(attrs[attr])) + if (token["name"] in self.svg_allow_local_href and + (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', + attrs[(namespaces['xlink'], 'href')])): + del attrs[(namespaces['xlink'], 'href')] + if (None, 'style') in attrs: + attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) + token["data"] = attrs + return token + + def disallowed_token(self, token): + token_type = token["type"] + if token_type == "EndTag": + token["data"] = "" % token["name"] + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] + for (ns, name), v in token["data"].items(): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: + token["data"] = "<%s>" % token["name"] + if token.get("selfClosing"): + token["data"] = token["data"][:-1] + "/>" + + token["type"] = "Characters" + + del token["name"] + return token + + def sanitize_css(self, style): + # disallow urls + style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) + + # gauntlet + if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): + return '' + if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): + return '' + + clean = [] + for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): + if not value: + continue + if prop.lower() in self.allowed_css_properties: + clean.append(prop + ': ' + value + ';') + elif prop.split('-')[0].lower() in ['background', 'border', 'margin', + 'padding']: + for keyword in value.split(): + if keyword not in self.allowed_css_keywords and \ + not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa + break + else: + clean.append(prop + ': ' + value + ';') + elif prop.lower() in self.allowed_svg_properties: + clean.append(prop + ': ' + value + ';') + + return ' '.join(clean) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 000000000..0d12584b4 --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . 
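A minimal sketch of the sanitize_css() approach above, omitting the two gatekeeping regexes that reject suspicious styles outright: url(...) tokens are blanked, then only declarations with allowlisted property names are kept.

import re

allowed_css_properties = frozenset(["color", "width"])

def sanitize_css(style):
    style = re.sub(r"url\s*\(\s*[^\s)]+?\s*\)\s*", " ", style)
    clean = []
    for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
        if value and prop.lower() in allowed_css_properties:
            clean.append("%s: %s;" % (prop, value))
    return " ".join(clean)

print(sanitize_css("color: red; background: url(evil); width: 10px"))
# color: red; width: 10px;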
import base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(base.Filter): + """Collapses whitespace except in pre, textarea, and script elements""" + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/html5parser.py b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 000000000..d06784f3d --- /dev/null +++ b/myenv/lib/python3.10/site-packages/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2795 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass, viewkeys + +import types + +from . import _inputstream +from . import _tokenizer + +from . import treebuilders +from .treebuilders.base import Marker + +from . import _utils +from .constants import ( + spaceCharacters, asciiUpper2Lower, + specialElements, headingElements, cdataElements, rcdataElements, + tokenTypes, tagTokenTypes, + namespaces, + htmlIntegrationPointElements, mathmlTextIntegrationPointElements, + adjustForeignAttributes as adjustForeignAttributesMap, + adjustMathMLAttributes, adjustSVGAttributes, + E, + _ReparseException +) + + +def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML document as a string or file-like object into a tree + + :arg doc: the document to parse as a string or file-like object + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import parse + >>> parse('
<html><body><p>This is a doc</p></body></html>
') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, **kwargs) + + +def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): + """Parse an HTML fragment as a string or file-like object into a tree + + :arg doc: the fragment to parse as a string or file-like object + + :arg container: the container context to parse the fragment in + + :arg treebuilder: the treebuilder to use when parsing + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import parseFragment + >>> parseFragment('this is a fragment') + + + """ + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, **kwargs) + + +def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser + + Generates a tree structure from a stream of (possibly malformed) HTML. + + """ + + def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): + """ + :arg tree: a treebuilder class controlling the type of tree that will be + returned. Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + + :arg strict: raise an exception when a parse error is encountered + + :arg namespaceHTMLElements: whether or not to namespace HTML elements + + :arg debug: whether or not to enable debug mode which logs things + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() # generates parser with etree builder + >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict + + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.errors = [] + + self.phases = {name: cls(self, self.tree) for name, cls in + getPhases(debug).items()} + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.scripting = scripting + self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) + self.reset() + + try: + self.mainLoop() + except _ReparseException: + self.reset() + self.mainLoop() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + self.phase.insertHtmlElement() 
+ self.resetInsertionMode() + else: + self.innerHTML = False # pylint:disable=redefined-variable-type + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + """Name of the character encoding that was used to decode the input stream, or + :obj:`None` if that is not determined yet + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0].name + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.tokenizer: + prev_token = None + new_token = token + while new_token is not None: + prev_token = new_token + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + type == StartTagToken and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and prev_token["selfClosing"] and + not prev_token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": prev_token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def parse(self, stream, *args, **kwargs): + """Parse a HTML document into a well-formed tree + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional 
encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element). + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5parser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parse('
<html><body><p>This is a doc</p></body></html>
') + + + """ + self._parse(stream, False, None, *args, **kwargs) + return self.tree.getDocument() + + def parseFragment(self, stream, *args, **kwargs): + """Parse a HTML fragment into a well-formed tree fragment + + :arg container: name of the element we're setting the innerHTML + property if set to None, default to 'div' + + :arg stream: a file-like object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + + :arg scripting: treat noscript elements as if JavaScript was turned on + + :returns: parsed tree + + Example: + + >>> from html5lib.html5libparser import HTMLParser + >>> parser = HTMLParser() + >>> parser.parseFragment('this is a fragment') + + + """ + self._parse(stream, True, *args, **kwargs) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars=None): + # XXX The idea is to make errorcode mandatory. + if datavars is None: + datavars = {} + self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def adjustMathMLAttributes(self, token): + adjust_attributes(token, adjustMathMLAttributes) + + def adjustSVGAttributes(self, token): + adjust_attributes(token, adjustSVGAttributes) + + def adjustForeignAttributes(self, token): + adjust_attributes(token, adjustForeignAttributesMap) + + def reparseTokenNormal(self, token): + # pylint:disable=unused-argument + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) + last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + # Generic RCDATA/RAWTEXT Parsing algorithm + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +@_utils.memoize +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = {value: key for key, value in tokenTypes.items()} + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + info = {"type": type_names[token['type']]} + if token['type'] in tagTokenTypes: + info["name"] = token['name'] + + 
self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + # pylint:disable=unused-argument + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + __slots__ = ("parser", "tree", "__startTagCache", "__endTagCache") + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + self.__startTagCache = {} + self.__endTagCache = {} + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__startTagCache: + func = self.__startTagCache[name] + else: + func = self.__startTagCache[name] = self.startTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__startTagCache) > len(self.startTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__startTagCache.pop(next(iter(self.__startTagCache))) + return func(token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
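The start- and end-tag dispatch above caches handler lookups per tag name and bounds the cache at 110% of the handler table, so a flood of unknown tag names cannot grow it without limit. A standalone sketch with strings standing in for bound methods:

handlers = {"html": "handle_html", "head": "handle_head"}  # stand-in table
cache = {}

def dispatch(name):
    if name in cache:
        return cache[name]
    func = cache[name] = handlers.get(name, "handle_other")
    # Eviction order is FIFO on CPython >= 3.7 dicts, arbitrary before that.
    while len(cache) > len(handlers) * 1.1:
        cache.pop(next(iter(cache)))
    return func

for tag in ("html", "head", "div", "span", "p"):
    dispatch(tag)
print(sorted(cache))  # ['p', 'span']: only the newest entries survive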
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + # Note the caching is done here rather than BoundMethodDispatcher as doing it there + # requires a circular reference to the Phase, and this ends up with a significant + # (CPython 2.7, 3.8) GC cost when parsing many short inputs + name = token["name"] + # In Py2, using `in` is quicker in general than try/except KeyError + # In Py3, `in` is quicker when there are few cache hits (typically short inputs) + if name in self.__endTagCache: + func = self.__endTagCache[name] + else: + func = self.__endTagCache[name] = self.endTagHandler[name] + # bound the cache size in case we get loads of unknown tags + while len(self.__endTagCache) > len(self.endTagHandler) * 1.1: + # this makes the eviction policy random on Py < 3.7 and FIFO >= 3.7 + self.__endTagCache.pop(next(iter(self.__endTagCache))) + return func(token) + + class InitialPhase(Phase): + __slots__ = tuple() + + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" or + publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. 
corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) or + publicId in ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None or + systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) or + publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + __slots__ = tuple() + + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + 
pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + (("head", "body", "html", "br"), endTagImplyHead) + ]) + endTagHandler.default = endTagOther + + class InHeadPhase(Phase): + __slots__ = tuple() + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
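A hedged usage sketch of the behaviour startTagMeta implements above: when the parser is fed bytes and the stream's encoding is still tentative, a meta charset (or an http-equiv content-type) switches the decoder mid-parse. The expected value below assumes detection proceeds as described and no BOM is present.

import html5lib

raw = (b"<html><head><meta charset=windows-1252></head>"
       b"<body>caf\xe9</body></html>")
parser = html5lib.HTMLParser()
parser.parse(raw)
print(parser.documentEncoding)  # expected: windows-1252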
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = _inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagNoscript(self, token): + if self.parser.scripting: + self.parser.parseRCDataRawtext(token, "RAWTEXT") + else: + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inHeadNoscript"] + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("title", startTagTitle), + (("noframes", "style"), startTagNoFramesStyle), + ("noscript", startTagNoscript), + ("script", startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + startTagBaseLinkCommand), + ("meta", startTagMeta), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("head", endTagHead), + (("br", "html", "body"), endTagHtmlBodyBr) + ]) + endTagHandler.default = endTagOther + + class InHeadNoscriptPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.parser.parseError("eof-in-head-noscript") + self.anythingElse() + return True + + def processComment(self, token): + return self.parser.phases["inHead"].processComment(token) + + def processCharacters(self, token): + self.parser.parseError("char-in-head-noscript") + self.anythingElse() + return token + + def processSpaceCharacters(self, token): + return self.parser.phases["inHead"].processSpaceCharacters(token) + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBaseLinkCommand(self, token): + return self.parser.phases["inHead"].processStartTag(token) + + def startTagHeadNoscript(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagNoscript(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "noscript", "Expected noscript got %s" % node.name + self.parser.phase = self.parser.phases["inHead"] + + def endTagBr(self, token): + self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + # Caller must raise parse error first! 
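+ # Pop the open <noscript> via a synthesized end tag, which also
+ # returns the parser to the "in head" phase.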
+ self.endTagNoscript(impliedTagToken("noscript")) + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + (("basefont", "bgsound", "link", "meta", "noframes", "style"), startTagBaseLinkCommand), + (("head", "noscript"), startTagHeadNoscript), + ]) + startTagHandler.default = startTagOther + + endTagHandler = _utils.MethodDispatcher([ + ("noscript", endTagNoscript), + ("br", endTagBr), + ]) + endTagHandler.default = endTagOther + + class AfterHeadPhase(Phase): + __slots__ = tuple() + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = self.parser.phases["inBody"] + self.parser.framesetOK = True + + startTagHandler = _utils.MethodDispatcher([ + ("html", startTagHtml), + ("body", startTagBody), + ("frameset", startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + startTagFromHead), + ("head", startTagHead) + ]) + startTagHandler.default = startTagOther + endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), + endTagHtmlBodyBr)]) + endTagHandler.default = endTagOther + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + __slots__ = ("processSpaceCharacters",) + + def __init__(self, *args, **kwargs): + super(InBodyPhase, self).__init__(*args, **kwargs) + # Set this to the default handler + self.processSpaceCharacters = self.processSpaceCharactersNonPre + + def isMatchingFormattingElement(self, node1, node2): + return (node1.name == node2.name and + node1.namespace == node2.namespace and + node1.attributes == node2.attributes) + + # helper + def addFormattingElement(self, token): + self.tree.insertElement(token) + element = self.tree.openElements[-1] + + matchingElements = [] + for node in self.tree.activeFormattingElements[::-1]: + if node is Marker: + break + elif self.isMatchingFormattingElement(node, element): + matchingElements.append(node) + + assert len(matchingElements) <= 3 + if len(matchingElements) == 3: + self.tree.activeFormattingElements.remove(matchingElements[-1]) + self.tree.activeFormattingElements.append(element) + + # the real deal + def processEOF(self): + allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", 
"td", + "tfoot", "th", "thead", "tr", "body", + "html")) + for node in self.tree.openElements[::-1]: + if node.name not in allowed_elements: + self.parser.parseError("expected-closing-tag-but-got-eof") + break + # Stop parsing + + def processSpaceCharactersDropNewline(self, token): + # Sometimes (start of
<pre>, <listing>, and <textarea>