tldr: refactoring

Romain J 2019-12-16 18:12:10 +01:00
commit f42b2194cd
2881 changed files with 568359 additions and 388 deletions

76
venv/bin/activate Normal file

@@ -0,0 +1,76 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi
    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi
    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi
    unset VIRTUAL_ENV
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/home/romain/gnousEU/tuxbot-bot/venv"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
if [ "x(venv) " != x ] ; then
PS1="(venv) ${PS1:-}"
else
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
else
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
fi
fi
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
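
The script above does two things when sourced: it exports VIRTUAL_ENV and prepends "$VIRTUAL_ENV/bin" to PATH, and it defines deactivate to undo exactly those changes. A minimal Python sketch (not one of the committed files; the helper name venv_info is illustrative) of how a process can check whether such an activated environment is in effect:

import os
import sys

def venv_info():
    # "source bin/activate" exports VIRTUAL_ENV and prepends "$VIRTUAL_ENV/bin" to PATH;
    # an interpreter launched from the venv also reports a prefix different from its base.
    return {
        "VIRTUAL_ENV": os.environ.get("VIRTUAL_ENV"),
        "prefix_differs": sys.prefix != getattr(sys, "base_prefix", sys.prefix),
        "interpreter": sys.executable,
    }

if __name__ == "__main__":
    print(venv_info())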

37
venv/bin/activate.csh Normal file

@@ -0,0 +1,37 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/home/romain/gnousEU/tuxbot-bot/venv"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
if ("venv" != "") then
set env_name = "venv"
else
if (`basename "VIRTUAL_ENV"` == "__") then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
else
set env_name = `basename "$VIRTUAL_ENV"`
endif
endif
set prompt = "[$env_name] $prompt"
unset env_name
endif
alias pydoc python -m pydoc
rehash

75
venv/bin/activate.fish Normal file

@@ -0,0 +1,75 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly
function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end
    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end
    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end
# unset irrelevant variables
deactivate nondestructive
set -gx VIRTUAL_ENV "/home/romain/gnousEU/tuxbot-bot/venv"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# save the current fish_prompt function as the function _old_fish_prompt
functions -c fish_prompt _old_fish_prompt
# with the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command
set -l old_status $status
# Prompt override?
if test -n "(venv) "
printf "%s%s" "(venv) " (set_color normal)
else
# ...Otherwise, prepend env
set -l _checkbase (basename "$VIRTUAL_ENV")
if test $_checkbase = "__"
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
else
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
end
end
# Restore the return status of the previous command.
echo "exit $old_status" | .
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

10
venv/bin/chardetect Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
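
chardetect above, like the other venv/bin wrappers that follow, is a generated console-script shim: it imports the package's main() and rewrites sys.argv[0] to strip the -script.py/.exe suffixes used on Windows. The sketch below (not one of the committed files) shows how the same kind of console_scripts entry point can be resolved by name at runtime; it assumes Python 3.8+ for importlib.metadata, whereas this venv is 3.7 and would need the importlib_metadata backport or pkg_resources instead:

import sys
from importlib.metadata import entry_points

def load_console_script(name):
    eps = entry_points()
    # entry_points() returns a group -> list mapping on 3.8/3.9 and a selectable
    # collection on 3.10+, so handle both shapes.
    if hasattr(eps, "select"):
        matches = list(eps.select(group="console_scripts", name=name))
    else:
        matches = [ep for ep in eps.get("console_scripts", []) if ep.name == name]
    if not matches:
        raise SystemExit("no console_scripts entry point named %r" % name)
    return matches[0].load()  # imports the module and returns the callable the wrapper calls

if __name__ == "__main__":
    sys.exit(load_console_script("chardetect")())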

10
venv/bin/easy_install Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/easy_install-3.7 Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/import_expression Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from import_expression.__main__ import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/pip Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/pip3 Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/pip3.7 Executable file

@@ -0,0 +1,10 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

8
venv/bin/pybabel Executable file

@@ -0,0 +1,8 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from babel.messages.frontend import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

1
venv/bin/python Symbolic link

@@ -0,0 +1 @@
python3.7

1
venv/bin/python3 Symbolic link

@@ -0,0 +1 @@
python3.7

1
venv/bin/python3.7 Symbolic link

@@ -0,0 +1 @@
/usr/local/bin/python3.7

8
venv/bin/tcp-latency Executable file

@@ -0,0 +1,8 @@
#!/home/romain/gnousEU/tuxbot-bot/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from tcp_latency.tcp_latency import _main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(_main())

1
venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/INSTALLER Normal file

@@ -0,0 +1 @@
pip

29
venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/LICENSE Normal file

@@ -0,0 +1,29 @@
Copyright (c) 2013-2019 by the Babel Team, see AUTHORS for more information.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. The name of the author may not be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

31
venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/METADATA Normal file

@@ -0,0 +1,31 @@
Metadata-Version: 2.1
Name: Babel
Version: 2.7.0
Summary: Internationalization utilities
Home-page: http://babel.pocoo.org/
Author: Armin Ronacher
Author-email: armin.ronacher@active-4.com
License: BSD
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Requires-Dist: pytz (>=2015.7)
A collection of tools for internationalizing Python applications.
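
The RECORD manifest reproduced next lists every installed file as path,sha256=<digest>,size, where the digest is the urlsafe-base64 SHA-256 of the file contents with '=' padding stripped (the wheel RECORD convention). A minimal sketch (not one of the committed files; record_entry is an illustrative name) of how one such row can be recomputed:

import base64
import hashlib
from pathlib import Path

def record_entry(relpath):
    # Recompute "path,sha256=<urlsafe-b64 digest, '=' padding stripped>,<size in bytes>".
    data = Path(relpath).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return "%s,sha256=%s,%d" % (relpath, digest.decode("ascii"), len(data))

# e.g. record_entry("babel/__init__.py"), run from site-packages, should match the row below.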

815
venv/lib/python3.7/site-packages/Babel-2.7.0.dist-info/RECORD Normal file

@@ -0,0 +1,815 @@
../../../bin/pybabel,sha256=X97VAhJYxnLuo8P_afP6cjXSHxOVROguRI628oADXjA,256
Babel-2.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
Babel-2.7.0.dist-info/LICENSE,sha256=Wg311G6MsUvV1MLNdzOLIvaB_oi9NCNmJrGjrjtQEBo,1451
Babel-2.7.0.dist-info/METADATA,sha256=Sakpsddm3zC_mjuf8Xl2jlGr9l6fN3QH5TZOJCKBxbs,1223
Babel-2.7.0.dist-info/RECORD,,
Babel-2.7.0.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110
Babel-2.7.0.dist-info/entry_points.txt,sha256=dyIkorJhQj3IvTvmMylr1wEzW7vfxTw5RTOWa8zoqh0,764
Babel-2.7.0.dist-info/top_level.txt,sha256=mQO3vNkqlcYs_xRaL5EpRIy1IRjMp4N9_vdwmiemPXo,6
babel/__init__.py,sha256=Z867BI00sB-W9VXa74LciJmUXnLcDE8HYet2sPuwwwA,714
babel/__pycache__/__init__.cpython-37.pyc,,
babel/__pycache__/_compat.cpython-37.pyc,,
babel/__pycache__/core.cpython-37.pyc,,
babel/__pycache__/dates.cpython-37.pyc,,
babel/__pycache__/languages.cpython-37.pyc,,
babel/__pycache__/lists.cpython-37.pyc,,
babel/__pycache__/localedata.cpython-37.pyc,,
babel/__pycache__/numbers.cpython-37.pyc,,
babel/__pycache__/plural.cpython-37.pyc,,
babel/__pycache__/support.cpython-37.pyc,,
babel/__pycache__/units.cpython-37.pyc,,
babel/__pycache__/util.cpython-37.pyc,,
babel/_compat.py,sha256=DHx6vQR-LazZlNdeBE7wGTOBv1_1HWbRboKOkX76TiY,1685
babel/core.py,sha256=fkxYTtAryVBEwsfYg78Y-eRfq_dRkzSFlMSXoDoliSM,36907
babel/dates.py,sha256=LLBcjpOs_8Ng9StJdk-HcUxUnrTTtLtKYKOdLzxGJYU,67479
babel/global.dat,sha256=h29NSbHmHUP3FulyRoiU6aDSj7FgcGBZyc212lnUL_c,253062
babel/languages.py,sha256=UmLTj4Nai3kQrwHX6jptehVLeAw-KAdxcmcp2iDlgvI,2743
babel/lists.py,sha256=nON3qfMoLLap0YTTRGBYWbwekBFknIABbulnsX70lrk,2719
babel/locale-data/af.dat,sha256=n7zhDTHDXh8voszXVJXnIaLC_ihEYStNBQdNZC_TKYE,167016
babel/locale-data/af_NA.dat,sha256=SCCik7H53r08j9VR2MlOEPFagN5P153ef0fbsIFwsvY,1425
babel/locale-data/af_ZA.dat,sha256=TvoOI0O5FP8QqIwI506xwhymtW-ZwtnGSh7OGg8W69s,626
babel/locale-data/agq.dat,sha256=K7_PXOHrOyQBaZgHpJFpsI7DyOoOgxij1TmGPvHW3r8,17399
babel/locale-data/agq_CM.dat,sha256=gn5wN9w6lKBCTkuKLwKCFrnkSAxA5Utr1N9py-ciqkc,627
babel/locale-data/ak.dat,sha256=TI9PGam0sIfflHMh1jbrZ9hKpRVfcZVLC1rGyFFTZh8,15911
babel/locale-data/ak_GH.dat,sha256=iTuDrca96IQYXDwnpNiqKtGn8d83kgs-LsWTfJntS4s,607
babel/locale-data/am.dat,sha256=lT1XWTPuNzjyYF_VTyqhmCGAY7upVMPUlD70ebWnqcw,198396
babel/locale-data/am_ET.dat,sha256=8zdStIXJmVpgppSVpZUwV04WJ4kUWQwFWRVb6AQZahY,626
babel/locale-data/ar.dat,sha256=8ClpxZdrXplVALQ7UzQRwmmEbJMo5nSH1wNzQVfXBr8,335041
babel/locale-data/ar_001.dat,sha256=gmM0xCrg2w8yy4Xh8viSHsnitl6HnuuDMv1yRdaE9Us,1698
babel/locale-data/ar_AE.dat,sha256=0HjSYklVuIAiTVYB-qEzvdHa2jpX0d1bOxoNhPORbO0,1056
babel/locale-data/ar_BH.dat,sha256=Yt0X2Col8Ha78QaBs5_H0FDllYTwUeGSZI7VCvMplqk,669
babel/locale-data/ar_DJ.dat,sha256=0hcv-LMM77rO6XsghzDdqbIgEXy7b3be1ENh994EWrM,647
babel/locale-data/ar_DZ.dat,sha256=LxHWSDb5T857YqpPqJebqi4rrm08IvgvuK-B8I8B7PI,1731
babel/locale-data/ar_EG.dat,sha256=7NhEtUCRy1NrFLHz8VXla7JSZdILpnIc65DYOD7zLYY,706
babel/locale-data/ar_EH.dat,sha256=CNFFp1Dwz32d0TcaMjTKIYVxJovVaFLlUvf9ACV-9tQ,607
babel/locale-data/ar_ER.dat,sha256=gRdDWAO_ViChJEPChaJcNSIX4DmMbkAMPOpKnq1QuZU,628
babel/locale-data/ar_IL.dat,sha256=v57QG4365fJbwLF6usm9-fEuq9qJYd74nRD_gWmoTk4,1213
babel/locale-data/ar_IQ.dat,sha256=4Xi7cAbOE20jYoFxqifGshiC5tGrXrEUCsiZGwR21kg,2354
babel/locale-data/ar_JO.dat,sha256=LouBd_IYs070hS-CYkyqg5_Y8sE-_y6cl_TNp-lGFcU,2353
babel/locale-data/ar_KM.dat,sha256=vvAkx4__TxagJozvyudv38VzVgIljVPBPvyMGsz11G8,1179
babel/locale-data/ar_KW.dat,sha256=oWH9zhdJxQivmCEgKERJ8ADEobK0BqdIf_6zfUfGgkY,669
babel/locale-data/ar_LB.dat,sha256=cKY3x_auClEkSF-O9bdUuG5bOi-X4HZcZv-7HcM_uV0,2354
babel/locale-data/ar_LY.dat,sha256=NsVSuzMZGJg7-AN4FurzzIMh8iqQECSEPJHF6bW0nJQ,1670
babel/locale-data/ar_MA.dat,sha256=4XxmrdYhzerIdqqgHLH55Hg9W0cMFcqHjcAlPLRNn2g,2025
babel/locale-data/ar_MR.dat,sha256=IRZkTxOZ39eZDjDDr_h9GjzgGbS9Kylg-8QQfJAzl0Y,2191
babel/locale-data/ar_OM.dat,sha256=SwCPf_1V4qcSfB6F3LzQo861te_t91Kk4GUX8IV_A_M,669
babel/locale-data/ar_PS.dat,sha256=hGNnwUj0XR-x19RySMnnOgC0Kd4odVkj85gYJHXxGLg,2291
babel/locale-data/ar_QA.dat,sha256=Ymdu7PL0gvtu7JPJ-hUyoL1eN_mHss3ZF0XzMkqrsFQ,669
babel/locale-data/ar_SA.dat,sha256=MxBfIMX4A3Na-0pYAK868hW35JMWu5msIVsF8PRWF0c,1906
babel/locale-data/ar_SD.dat,sha256=nl5A-ub3QRuVC94h7X9Ak3gG9nrtylVXPh3O7Pb9PvY,669
babel/locale-data/ar_SO.dat,sha256=5dbXvnXAyzeXNoRifm6dn9iHT3uR1-sERZJvw0xrdAw,626
babel/locale-data/ar_SS.dat,sha256=xeUbi35nGVXcOa1H9b97Qx0GhQGmzbQmuyQwhpP7dFs,649
babel/locale-data/ar_SY.dat,sha256=bHN5TGlFMD7H2qIWGO_5GCLjtPm2z5u6pLDLPTksCWo,2353
babel/locale-data/ar_TD.dat,sha256=NbYxtvmfDFCgyukJrfEluSKqSvxGkXkFdVjbowqUUmM,607
babel/locale-data/ar_TN.dat,sha256=utHPqirZxiuEj86xGgZwtv50o4sqavbD-pCLA02EkC0,1669
babel/locale-data/ar_YE.dat,sha256=ugcwooc31b9YRfoeVUihy87zQGHbpe3Pcp_GzSmYk8Y,669
babel/locale-data/as.dat,sha256=v0f4nBxj3fZPic9BPsPp4w61wfdUWeyrn0fZjyfNWTg,230479
babel/locale-data/as_IN.dat,sha256=_1h7wmRt7evqXyqFIKyo7Ppjclhn5w9rMtLEZG7Fdgc,649
babel/locale-data/asa.dat,sha256=uX2LqOD_JuPjax_NYcVVCrXUCKAzA0xAve7S6ZzfeY4,16240
babel/locale-data/asa_TZ.dat,sha256=Rfjx5YBakJ7TNX3X4uETtuArOF40ftdEonNMAwVNRzs,608
babel/locale-data/ast.dat,sha256=RiZhHO8GKLb6a6dSEdV5LLGpLzzfhNz0XMzHpgB5_Es,210339
babel/locale-data/ast_ES.dat,sha256=-iocl3j5ah7FgqKU9XQR-adJKP8bGsBMLBFrbg8NjbY,645
babel/locale-data/az.dat,sha256=QWl1YVyX7U_hMfum9QuBZBz4zB3NXNEYbuuJC6Ja-P4,191875
babel/locale-data/az_Cyrl.dat,sha256=nPqsNWZo8xrNUv-N8zko_9Ve5pQ9WIDMdeI95CytiPs,38954
babel/locale-data/az_Cyrl_AZ.dat,sha256=0C_Y00OpozgdcwgbDOGs2R67PQ0TBF1bMz4ovHLmZ8o,626
babel/locale-data/az_Latn.dat,sha256=7wrR4YWwEpz672DsTtFe7BItp0oMldPoRHnDT4yP38g,2246
babel/locale-data/az_Latn_AZ.dat,sha256=MxhS4BIHuziBc2dKVrCvBHWw-zt-ZRqb_mc7QyRegm4,626
babel/locale-data/bas.dat,sha256=O9JPLvooFeB-9j1_1l_ceoknZUVVG7aTxXf0Pud4Xss,17184
babel/locale-data/bas_CM.dat,sha256=6roi5oBt9ByVAnbFcedUcRg2HmkLI7m_gL8L8Xx18II,627
babel/locale-data/be.dat,sha256=H_LUM0cEwMyAkBnw0UvBLHTR-mYnJxv9pXOjYsLZrUE,258410
babel/locale-data/be_BY.dat,sha256=GTDO3Za8gEdwa1UabcPJ32n-rSJMvRfzBA_sh0Mv19c,626
babel/locale-data/bem.dat,sha256=-_tBxlhspwM9kCqBwWO7BtjzMW-lA53eZ2omKfErUng,6555
babel/locale-data/bem_ZM.dat,sha256=d2G4WKDe_WNPgAv1JxwRp4vsMfyIGdgBfj5xhy3E5MY,608
babel/locale-data/bez.dat,sha256=bOuSiU51Cl-TV2XlIdyvBaRSvWrua_6FJSf0Yd3bT4w,17031
babel/locale-data/bez_TZ.dat,sha256=pPQxCLShiJXgsePTeWi_2VtDRgjIorpbw2Ab-43Pvfw,608
babel/locale-data/bg.dat,sha256=UJAqU4JVTF_CqUzNFPOpGadCFUe8YoG_-GZJ_wCwygs,228458
babel/locale-data/bg_BG.dat,sha256=S98UqeOmRzTvOPoQeaXthy7N_0-ZCi3g3qScIOkGPxE,644
babel/locale-data/bm.dat,sha256=UvSMvGOkaTuc1xClsfD0-6WaTGswrRKfYq89SzD8imQ,15938
babel/locale-data/bm_ML.dat,sha256=SYWVELAdIQjQ3lBuH6pTth6iAqJNNaIx7BcxGiVwnos,607
babel/locale-data/bn.dat,sha256=78q3cks0mrxGtG7Ap3UAHakz6y9ADBlJVF2is2BWX5c,257002
babel/locale-data/bn_BD.dat,sha256=MwMquIkmom46hTd8CKQqNjq0h9oIT6kWmj-rTvLDXsM,626
babel/locale-data/bn_IN.dat,sha256=r2TxDmuVgf4o9O7Cf4A1b5C-V9KwuY-0P_l8tAd-sKA,884
babel/locale-data/bo.dat,sha256=3dtXet1Kn30N9c7_IKIVXY-Y2i7Hy9pWsIp262awTMg,22546
babel/locale-data/bo_CN.dat,sha256=MvO2V4RU2DlFEFgbOw-hjWd6urYl307-WshKY3rlTfY,626
babel/locale-data/bo_IN.dat,sha256=IORPH-j_pfMPkLapwgn1WpiXBz0Ww14YpwqWlbPzrps,722
babel/locale-data/br.dat,sha256=Efh7GVXZ7TseoExgtaItATT6JYDbCaCURZTsDmXOEWM,252311
babel/locale-data/br_FR.dat,sha256=YpLV2DDbxqxCSqEIeARiZFN93Zdm-sIvK1WbZSFaDfE,644
babel/locale-data/brx.dat,sha256=7B-M7g9qrHT2PDvZq-6xTP898ngNpqH-qwiB3JvY8gA,124250
babel/locale-data/brx_IN.dat,sha256=bwkbIeGZgMgv1gqFuvtEcg8IaUw8fU8386EX9NxwazA,650
babel/locale-data/bs.dat,sha256=xA5-CHy_ckC36r_0fjX6F8tGkwtVvr5Q3fPs1Af1KOk,236345
babel/locale-data/bs_Cyrl.dat,sha256=qpMX1PaPmMJl2MhacaXdE8cy1q3vPVvlIQRsKCNj6lk,191220
babel/locale-data/bs_Cyrl_BA.dat,sha256=BgEXeYz5aTttgFbOjVxMt8iVwiiYfUW2K-sCsjqcqrs,626
babel/locale-data/bs_Latn.dat,sha256=uwk-ewWX9CKMAfLwY_vXGjHF_g4Wr9DrpESqKQxUmtc,1978
babel/locale-data/bs_Latn_BA.dat,sha256=edblc-NRnO8KuX3JVgKLz_gnAwYe2j01L3iORx-8IW4,626
babel/locale-data/ca.dat,sha256=VkbK3W_WAdeKSNAPreqIHHehmkXa29lio6HUuFchyas,205061
babel/locale-data/ca_AD.dat,sha256=bNYq_kpURgNvLK9gIo8oTycE1bPkGYTKMqa6JKHaLkM,644
babel/locale-data/ca_ES.dat,sha256=B0ToeaEZp_47D_jlPmegrl2nux7jJdhTjERBHAllQ5s,644
babel/locale-data/ca_ES_VALENCIA.dat,sha256=faT9vchH9xCQ3oY6aR2shGI-nra3S2BHllCzqcvI1Xg,3694
babel/locale-data/ca_FR.dat,sha256=JxBZKaUKWLHNi01OliUAwDTlDAob7flGt2J_5gYaZRU,663
babel/locale-data/ca_IT.dat,sha256=F-cytzBQLU3eQ7d-eqwL5FBwUsyES-w7IenrpJfRGKU,644
babel/locale-data/ccp.dat,sha256=D-Z3zFzkvAtx4TFn94QvpQXfbzVSE3KNWIa7HRCCvJs,275311
babel/locale-data/ccp_BD.dat,sha256=X4bHc5AxNakoqr-IIiW5pXlshq1rkhmd4j0vASONKCY,627
babel/locale-data/ccp_IN.dat,sha256=CkS-g4E_3UpAWPJjcfSgLLlcyuD27IU-Qm4IWbAwj20,650
babel/locale-data/ce.dat,sha256=gwy0HT8b1m8DmGvv5a8wee3bFCNEYbDa0pwxlmJaonY,138724
babel/locale-data/ce_RU.dat,sha256=rKRXXvqaIlC_Tfa5SF9R6HucBA-8xKjx2A0rYkq6ico,644
babel/locale-data/ceb.dat,sha256=YwpY0r-7M6KN1YmcaLW7SgFSdx1yFp52mVqibrVQ4cs,14214
babel/locale-data/ceb_PH.dat,sha256=IIz-5_7M1McMnHJN34FagjNAKNFeyvgDIHo2RC0qaJo,627
babel/locale-data/cgg.dat,sha256=5Yi9LIsGO_eigtjnod_HlXYMFk7f_GyahSbPgM2yHkI,16281
babel/locale-data/cgg_UG.dat,sha256=29oSxuO0qn15ASh7buIKuXO7mY8l0CCjXPlLeTtqBGY,608
babel/locale-data/chr.dat,sha256=xXmjvaz3o4oorx1Mz8mSYeR0mY1U7m6Kk9FaIdIdK34,196079
babel/locale-data/chr_US.dat,sha256=j11CnbcC2COjt8xpooqGlEaMuY7HogdiNJLxKfeBvTU,645
babel/locale-data/ckb.dat,sha256=RkC398fj4MV-J3h5gXZIVvx02o-QNVoLejjxegf6QUY,41589
babel/locale-data/ckb_IQ.dat,sha256=aFZTJDR-waJrFCJrO2ugN4TRok8PWQ3gcEmdV6O_T7U,670
babel/locale-data/ckb_IR.dat,sha256=yrnqsVJoq9LsKcSg_ohDFgHkWBjxYrR24hlZOUSgk94,1222
babel/locale-data/cs.dat,sha256=jh-2Zl-BZDY4XdLcLQkUbirc0T3LoLD43EY63kfd-fo,287652
babel/locale-data/cs_CZ.dat,sha256=Imyqwx45p8yWaflAiltP7ccTxXK0F3potP7N2RTbv7s,644
babel/locale-data/cu.dat,sha256=rPG2xcL0oE-YmO73cZTNqfj0yY4mIKV2_58uWin5cXE,20286
babel/locale-data/cu_RU.dat,sha256=LkEMZZAlLI59QsMHcLj_yE9c7vvXGnFE1FSvnkgZkwE,644
babel/locale-data/cy.dat,sha256=eWrgesXXY9RM0NtaNKPdVocHizrtfpt1nKJFAawESl0,321884
babel/locale-data/cy_GB.dat,sha256=rCCg3lvwpJORx7LgBBF2dWsd6VnYybbrA2Na7gu7Q_I,644
babel/locale-data/da.dat,sha256=lE_A0Soj5f717xbJRW-885u210Qb80YafUnsIJ95y4U,197581
babel/locale-data/da_DK.dat,sha256=vP5m0YY7VSUJcHy4fJHTaaYnJFWKEv0N80rUCEd7pPw,644
babel/locale-data/da_GL.dat,sha256=OodsMwjqjnqXSimIZCOmUWDLLiIR9vvRj0imLbqx-mo,607
babel/locale-data/dav.dat,sha256=A8cH1vXScTUFVf2lRV0hMXQ957TF3vFcfxdejDO0hIo,16323
babel/locale-data/dav_KE.dat,sha256=tcXkL0o3AyPNTHJy5FlubLlhfHI6XzDwSK3lXSJMws4,627
babel/locale-data/de.dat,sha256=qIRVbYLDdmpybehjkMIM3VGlNVFPpUk9x5J1mIE4PUQ,197068
babel/locale-data/de_AT.dat,sha256=32JpQ4PJkEsB0dlBCz4BOqrFV6ANrXeYZivIGtw55OI,2581
babel/locale-data/de_BE.dat,sha256=pubC4Po4M_ZKPiT6EBNPMqi5TT9p_v-cf4V624OX2GE,644
babel/locale-data/de_CH.dat,sha256=DsfawA4hefcW9wyxOmvvkcmOdVQr4CivD37so9Bk_AY,4023
babel/locale-data/de_DE.dat,sha256=2ON8LeGdw4j5qNt9W0QKYTxxTZMTnAMR7pTw6qnwoHg,644
babel/locale-data/de_IT.dat,sha256=7adTDuD3BT7FDRebEbAq8VDJPVof1zhCfSTPFd0Ojm0,1637
babel/locale-data/de_LI.dat,sha256=cgNDay03PtZnrNp06QeLFXcLdsA8ObWQy9pW_NFepj8,1339
babel/locale-data/de_LU.dat,sha256=CQs6M5llCpctge33i5uLbxUzxTgNyuXa-izDdHwIVLg,1083
babel/locale-data/dje.dat,sha256=agpAQ8Rcw1YbFLLtsKktAqSazx2zpq8QpCNrHX7v2MI,16245
babel/locale-data/dje_NE.dat,sha256=RuHsfFP4RVlgcjI8SNx2Fu6TLMbxKkHMNcT-oiJIMOM,608
babel/locale-data/dsb.dat,sha256=kozmD1hEZMJgcGpEkRHvaYP2Qr8CJFI85L8fVIAu8hM,179612
babel/locale-data/dsb_DE.dat,sha256=7T-uUURejnX_8k9QPe_18AlJaUSEe9sA9TMmQ1wgZMQ,645
babel/locale-data/dua.dat,sha256=bcxuGLFcYCtVFRPd5SgDkhTWbuqkxOD3mSgpdGOY4Eg,5376
babel/locale-data/dua_CM.dat,sha256=lIrnKrSo0EoxlBu9GBhXq510jzSkiEsjqqUqC_995XE,627
babel/locale-data/dyo.dat,sha256=PnfWJWkoPW-ejx4amchVKh7Fgj0QUhQZRKunyHtTNxc,10562
babel/locale-data/dyo_SN.dat,sha256=-x1xQXODFTNEgMz82mei0ikGCKCtFqaxU_NfECwqST4,608
babel/locale-data/dz.dat,sha256=tJZ6GIwTI1Bdbu58kL1EkSXljEaKeK4m4izG2a9ILV8,89987
babel/locale-data/dz_BT.dat,sha256=VdAHDYTPNqFMnYGYbHo576XYFPG2UF384aLrehyaqts,626
babel/locale-data/ebu.dat,sha256=HbHtkTdQzr5YZAkDkcFVHLrgjvX666F3RtNfS0Pp8jY,16295
babel/locale-data/ebu_KE.dat,sha256=cTzjMTyTnaPHdpt23aNKauKqMbsLluKDRAtqLlEcHdM,627
babel/locale-data/ee.dat,sha256=sbuNGrchVAjxZSubXsHsHsZhHnxDF6EQvlUsBituVeY,142502
babel/locale-data/ee_GH.dat,sha256=wQJ8oXvauwAl57jr4kCI8Lbab1x5TkDHXekT4DFthso,607
babel/locale-data/ee_TG.dat,sha256=TM0UYyUrl-Bmt1jlWRIF5bV2I2VE66u05sKuR8FGEFY,1159
babel/locale-data/el.dat,sha256=p_pvI3tl89K1INhijzoF7kqTS-3O9G1E6ZJbrzFxxok,238804
babel/locale-data/el_CY.dat,sha256=VD2Go4Sc1zQVjMJtLGvnmuF5OPEIeywn2Wwf4H6Fdjk,626
babel/locale-data/el_GR.dat,sha256=zCFaaWmChFq2CF-AKZwRnBVUrM7g3sLNzZnJwrSIIZI,644
babel/locale-data/en.dat,sha256=wX1t7tIVoUaqjyZItj-cKAaXrfqsCGL7ECdLhTSIj-A,188908
babel/locale-data/en_001.dat,sha256=D2FJZ32li6xFhpO-R9idEpvtufrPSiN4ek0LRdzYTu0,22984
babel/locale-data/en_150.dat,sha256=qTgjmREIoEc2Tb0aLkbDyjAzCKmCT8TVzcCpLX-Ryy8,1783
babel/locale-data/en_AE.dat,sha256=CN_hxkBYRCd-a9Bz3FlCK-EFnZQK_toPEIc4XFZXiEw,4132
babel/locale-data/en_AG.dat,sha256=1DmA0Xy8J-0SnJ1Q6v4esqx9Pefv-HSW78yVFJmrj9U,645
babel/locale-data/en_AI.dat,sha256=bhCQZ-Z5tOZRQW3PTA1rUs1sQGjGEbKximNzwVSchG4,1197
babel/locale-data/en_AS.dat,sha256=_SIxul_ScgcJ-XykW7DL3KHVNv64MupuobtohitMP4c,626
babel/locale-data/en_AT.dat,sha256=0SVp9Ppii1u4xW0iw5g23pZgoOf5oGK25-irBqFVMns,1218
babel/locale-data/en_AU.dat,sha256=sWiCpDVuk_oH7fZe2a8dyMYEy8anAaewADDb7DQAw00,19371
babel/locale-data/en_BB.dat,sha256=baZlkeds3JOu-3UKCF8ujDV4rvfXYlaKgJ4Dc1fXqiI,626
babel/locale-data/en_BE.dat,sha256=a7P2EyuIaAjbXY7j58nq31PwGuZf1lYRFnojFYj_OMI,1511
babel/locale-data/en_BI.dat,sha256=bkq4k2bBo_E3A8wwX7QEnZkU5LtQdTrMk2-QQ4imhMw,1180
babel/locale-data/en_BM.dat,sha256=X6PvUgbDx6OwHMalkjAXDd0lydkRBsSBrZqNl6UVNoc,645
babel/locale-data/en_BS.dat,sha256=9TrCiUQfxHHx7g-yz-_GI2klqVu2g252j0LuXaktYRo,829
babel/locale-data/en_BW.dat,sha256=X-9K41JjPlc41gmxcpaodMoIwXn5fa96bQYbJqv-R_U,2789
babel/locale-data/en_BZ.dat,sha256=eWzZruSKi_tgKUOc2rXUdxwNefFNhkLWXEan5uKgYI8,2968
babel/locale-data/en_CA.dat,sha256=fjrU_3issiPH6OyYP7McT5ksLmvajnuZ-E1MeTGuN5o,23633
babel/locale-data/en_CC.dat,sha256=B5QeLVVvHi6KfCjRyhNIOqSCQOUZAqw2oXjAWJhgRnA,1178
babel/locale-data/en_CH.dat,sha256=3sFUD9lRJQzMY-3aEbRem9G11PSz5qK405qUUSAIJgA,1118
babel/locale-data/en_CK.dat,sha256=sK0edWgfCMJRg0cwNPMEybw4r3xMqvpWEqndFBiWNvw,1178
babel/locale-data/en_CM.dat,sha256=6bqtyCDo46KZ9i4uwBJFM0ycKp5xMfKM1k2otywINNk,1428
babel/locale-data/en_CX.dat,sha256=aBkfLVs1UrWVD6v5uwBtSWBPVV4laI_FJqzutMfgG9s,1178
babel/locale-data/en_CY.dat,sha256=ccjHkNGpGSiwfPpVuQ6i-bP6V1DUzgl-uZkksWdgSY8,626
babel/locale-data/en_DE.dat,sha256=VEBAVN7p29z-qYL4NfxSToa6NoODpWBrE2Ubt7lzrdQ,970
babel/locale-data/en_DG.dat,sha256=MwocxJMOwXkvTQWxNUWVrJJ0aoodMD_ynLWZmJw1bQc,1159
babel/locale-data/en_DK.dat,sha256=5FZcz_4YOiccWFgO78b17TMTraw3jHBQxZ9KGOFGPKo,2368
babel/locale-data/en_DM.dat,sha256=6Rm5kjxUpw1DEtfbgMyvqxGc3adeO1k3-d1m2Hmmpwg,645
babel/locale-data/en_ER.dat,sha256=txc2R0EnMpsjnCYuYvwmufbkFsZoAICHle1dqc9BffE,878
babel/locale-data/en_FI.dat,sha256=ZzcResQZZ2FjJ_mibuL3uUNoko6y7eLBmnL-csiHl2A,2300
babel/locale-data/en_FJ.dat,sha256=AkxJVXEQLA1Ua0DngWALCkrXqrUubqYV19gSTpzSz4g,663
babel/locale-data/en_FK.dat,sha256=DyyvgOqM5DTTLJd_q4p3LxhnoGDiJTIQGKGNxhmIou8,1201
babel/locale-data/en_FM.dat,sha256=H--72gqzWZz5eW4lUNxBsNBxCln8004Xx65RlJ2S2lA,607
babel/locale-data/en_GB.dat,sha256=97zYSLKA0m4v7P-V_TFSN_UKyIUKDZAjTftemDkO-oc,4341
babel/locale-data/en_GD.dat,sha256=bgvDAPFq8l7HTa1kE5IzpVU29YsGZkEmmqyIsSRCJQU,626
babel/locale-data/en_GG.dat,sha256=ifKq9gm5rf9YNecYWdijPzNYoI_Qf1dFozvQIhDR8Y0,1264
babel/locale-data/en_GH.dat,sha256=hy1ltceBe1aTrg3z-DHmfB-bnAfWYpEA1OEZ5gsyql4,880
babel/locale-data/en_GI.dat,sha256=1Iy-rJ22TireT4CzQfqaqi3a202-tCvmb7zdJ4314is,1219
babel/locale-data/en_GM.dat,sha256=z2DFG5DR5B8RtTTIsVwZATy0S8rv6zAWLyRscQd3k8w,876
babel/locale-data/en_GU.dat,sha256=bMtawZxXvjc5yRXtQXrjZPTX4ZCMpnNF4CVS_1F9K5w,706
babel/locale-data/en_GY.dat,sha256=BqU6DCTro_M6hKRhOa9m0hyBDkpP0sB8RfHbDP2qrVo,685
babel/locale-data/en_HK.dat,sha256=ECm3X9TpRz3BRiZzS5PE9PPWavfqzmIOtxKHWJViFl4,2041
babel/locale-data/en_IE.dat,sha256=2lBAkPxrSoaj7bZJk8cji60rG8vvmccyjGpqDhpnBTw,2061
babel/locale-data/en_IL.dat,sha256=AX8nfaJRv06sLvsAwjd_6T-4Bq3bsUX2t09IbslQuh0,1415
babel/locale-data/en_IM.dat,sha256=H0aqDWIDNMcvtiWVlyCxLrUuKck6W4Z7PSaFTBgZN4w,1264
babel/locale-data/en_IN.dat,sha256=V9VhRVn8_5Gxz-flT_wSPUXB-ZeQvrxkHQ054fe70I4,2935
babel/locale-data/en_IO.dat,sha256=XHbYlZcZAPiASCNOgblEjjfEteKK1t_yMs5KuH7mbXw,1159
babel/locale-data/en_JE.dat,sha256=mTXSvtZBCgkwSDw_98BlFoHt4t8HmT8LpG4EwyLxFXg,1264
babel/locale-data/en_JM.dat,sha256=bkStMWRlXL3avU9nP8SqLr3RgexXFf2759B-ddWpYSU,1617
babel/locale-data/en_KE.dat,sha256=zIumuglOQkXXw6UBWKjYVsM4Xh9ofXtjzSSELyG0dek,1449
babel/locale-data/en_KI.dat,sha256=yzrJdQnlYEqBLzn0_lhvdvwLgO8PNu7amY049pRWNvY,626
babel/locale-data/en_KN.dat,sha256=Okq8Q5sfYpMAFO1mWI0zrnt5T1HOIS0RsizwmOvDnvg,626
babel/locale-data/en_KY.dat,sha256=6OZST-EB0kOfsSXlH3Lf50yIDUXEqb3Tv4pt41bHCcI,810
babel/locale-data/en_LC.dat,sha256=uT6fq9Y7SonQiwTniGmVdoEiRD8V9xnG2v0JPq7_JHA,626
babel/locale-data/en_LR.dat,sha256=3X0kSLR9P2riE7L3coz02uSB0RBvBjmGDI4xNw9IDbE,876
babel/locale-data/en_LS.dat,sha256=C-zMOTGHLkrT9Rcy7Fb6iyqdsLVxTFEkfDBMxq1bnhA,876
babel/locale-data/en_MG.dat,sha256=2tWGxQYUoBOnqap1IPgCbk-uEOJVohjPMwJwm3eZ-Q0,1429
babel/locale-data/en_MH.dat,sha256=bbe847fcBp-4kzZv-XJvFWSbVz0p9RyUz11Q5BBkyrk,1359
babel/locale-data/en_MO.dat,sha256=pL92dz8HCGnjF-wdGm8wsnouPCLFTCOE7vhstf8XpfM,821
babel/locale-data/en_MP.dat,sha256=15WnCMndLQPYhH94O54yb39H-WI_Cbv1wOs226UjhEA,1340
babel/locale-data/en_MS.dat,sha256=HKKSWqRV5wLDYJ8bsfptoeUmTKM4WF6uQXCCWSn4tAI,1178
babel/locale-data/en_MT.dat,sha256=jG_r8nn4zfVenRg4NnDCE49KUwgwSZMULF1fn6-Aaxc,1945
babel/locale-data/en_MU.dat,sha256=dTXBxkoStB11NJEfCI7Lk-aikFtQfNbzqu_AO00Dv8o,1429
babel/locale-data/en_MW.dat,sha256=S3sP1oPcKqzi1LuD0RPS_zhHjh-QwML8N690WMu4lwE,877
babel/locale-data/en_MY.dat,sha256=r7yQs7ToO6rVdU03_THwkvj5of3irIgsaBP7NVVa-hg,707
babel/locale-data/en_NA.dat,sha256=TUtOUjM6gdOCCxRo1hyY2uD0FAxuAsJ5zX2ttnqi3PU,876
babel/locale-data/en_NF.dat,sha256=3Ax90cl26hC11xws1R1BR7f3a3Aq0bir0iLJpoZyQS8,1178
babel/locale-data/en_NG.dat,sha256=sFZNrlCMZepXl5e9keqii9Z5zFuyS16R8oseYHN7T6o,1430
babel/locale-data/en_NL.dat,sha256=5nDlKVas8Unr6igs2LEZP-vABuwzCm_-FouqtOoi76Q,1115
babel/locale-data/en_NR.dat,sha256=CYRxajoGhR7Q4_cLTM7e0CxZ5WvRkuIAPB78bMt-sFU,1178
babel/locale-data/en_NU.dat,sha256=CCM0oHPRMBwNCGlNJm94krl1zyJvVCTIr4yD9XOTers,1178
babel/locale-data/en_NZ.dat,sha256=-BiOtyCtqLAyIOwsI-IlWLi0vjoxUAOkqR9I58QLOWw,2252
babel/locale-data/en_PG.dat,sha256=g6U0Ch-VKEDfF67z7xPDto7M21uck2BLN1fxMVdYUN4,626
babel/locale-data/en_PH.dat,sha256=iyRv_Jzm5_mzlOEv57K5SY0F1p7Q62IEEhSwl7-zl3Y,647
babel/locale-data/en_PK.dat,sha256=VgvTUaova5Mn2m1PH1vmTL5SY47fiF52g-0rNlFxNGE,1977
babel/locale-data/en_PN.dat,sha256=uw5Q9dusFuS0N_WpHusewTgruprVn6rH5AmAcTqNkhA,1178
babel/locale-data/en_PR.dat,sha256=wiMMS_iLTFOKBnPfXBKi8axJ18gZs_AhsSX-C5xbXuc,626
babel/locale-data/en_PW.dat,sha256=YsAyfY2ZCIxcH7vjdDhhJw0i1aZBUjnhswCmvFPTcEI,791
babel/locale-data/en_RW.dat,sha256=SXtqSRArmZ8Ae6tRLBVKN9U2jbcj4SBMVlmJOPfFY88,1429
babel/locale-data/en_SB.dat,sha256=TeRvJTZaVeth3jfMShL4J0uOP-c1ALqPuGz4Eay40y0,626
babel/locale-data/en_SC.dat,sha256=lUN96sufcnMy_OfIOEl6UCRSSvo_QItmM9rrwBHWLBQ,1179
babel/locale-data/en_SD.dat,sha256=0HivywfMrAPu5oRYBvds840J3Z6VyuucNEwLVQtYLA0,919
babel/locale-data/en_SE.dat,sha256=Nu0YE8yE53k2b9-z2_v0psDPiBEXS3CSUj1qRyvMUoo,1445
babel/locale-data/en_SG.dat,sha256=LT5fEP_actBmosDXIwGlfrS1aCuLJpN3ZOZA4XzVjfU,2075
babel/locale-data/en_SH.dat,sha256=5XMQM5SJRvoOIRqaR41oTHFJExn5V7GDUX5xiWlwF4E,1201
babel/locale-data/en_SI.dat,sha256=UjOS7UvVNnplRgs7YHYq_y3VO1mOaa989GxkXgaBMgE,986
babel/locale-data/en_SL.dat,sha256=1rmpoVhWrAXrcs-bHLXkrg-kpDL_YZalzDuH2fqR4k4,877
babel/locale-data/en_SS.dat,sha256=oGCDgGIjDXUK9hT1suYMfML6oHRHm313ecMk7k3nNmY,899
babel/locale-data/en_SX.dat,sha256=9s9Gw3t7w-GJsdo6kj-1x0g2TqQQmwfjPN3HbK8UGFQ,1181
babel/locale-data/en_SZ.dat,sha256=9Z4vBh4OBz9qkogw6-_gC5TQMhZBr88afkxbW6W9sek,876
babel/locale-data/en_TC.dat,sha256=jSfmNR9nqY3QY3N1OV5hlpZqi1zbULjy6FoBgkp1yWw,607
babel/locale-data/en_TK.dat,sha256=7Gy79svNx87Zoojlbh47BuKDzXGnX8MAtDZfp8lwgVs,1178
babel/locale-data/en_TO.dat,sha256=_jEqtNA13O7OvHiS2nYxJRJkSVTEwHp0Gz8a9Ap2Qi0,627
babel/locale-data/en_TT.dat,sha256=vckW6xKEehZYDQSnUNFc4atkBHUoYuQIHwSh2955a0w,645
babel/locale-data/en_TV.dat,sha256=zxOKaQFXMQ-Xuop3ypoHtrkNnjJxEsBMLzY8qo06_4Q,1178
babel/locale-data/en_TZ.dat,sha256=y1E2NWDNV24pOB07058E130ucyopZL-ES-J3BLZxf2g,1430
babel/locale-data/en_UG.dat,sha256=YQAlu-EAk9G9yvfvTQ15JGjfHYdBcaaspXwLDlbQ44g,1430
babel/locale-data/en_UM.dat,sha256=-dAHtvQRXZvhqFx4Ywp5ZfEmyLmmas7IoEyZqiUMBNk,644
babel/locale-data/en_US.dat,sha256=MXmrAUts6riPcAPxycocGvrM5Pe1Yy4c1bgrMf1X1CM,644
babel/locale-data/en_US_POSIX.dat,sha256=ItCAoaRpz7l_hVn33T09k8oO6G2zaTPLVhetlzyRu1Q,1222
babel/locale-data/en_VC.dat,sha256=Iw2ujy8fVyq5JqWFMZpI4mV8Se5tshArasCTGBiiAYw,626
babel/locale-data/en_VG.dat,sha256=4bSbfJa4Ce2prdCxLLgLLavOIgLzDd8SPAXOudWIGbE,607
babel/locale-data/en_VI.dat,sha256=z93RamsD556JVbwZ4QgFpZl51wlljnQL2PaLjZz-AIU,644
babel/locale-data/en_VU.dat,sha256=G35aw89Q2xsVnTFS4KD01XW2zejaqcEovjriXjcTh0U,627
babel/locale-data/en_WS.dat,sha256=Mugt5NlU0-LzAyWlSsLv7yUli8JgZdwcuwnxsRiU0pU,647
babel/locale-data/en_ZA.dat,sha256=MBf_0lOrGeM0Arv0-lAbLO3W8UR8WYdDwyhBGyWSMbY,3290
babel/locale-data/en_ZM.dat,sha256=QF860N1xHPumZFZUTGddFIUF6SGBBzga4-QoFozR13E,876
babel/locale-data/en_ZW.dat,sha256=yY7aWXU5ZJbBowJmIGIa5RvoanIkTHvOiiQqIp63jis,3220
babel/locale-data/eo.dat,sha256=TMUDwM_wjcv8djKo31VYxyFR3Mt_LIiEisfBu_pqN6k,40766
babel/locale-data/eo_001.dat,sha256=tSaGBVAG3TxoCtDe47FyjEQ5TfafFOfiLMuGo_UHtRA,841
babel/locale-data/es.dat,sha256=VaR2n-jrrDSsyTdDayniQ3astW1gipiOHaQPQHBWEdE,194176
babel/locale-data/es_419.dat,sha256=nUWG6Iwzuf3F3b0JTiSMPWWLrvY0rpjdoTybS2dxfYM,26732
babel/locale-data/es_AR.dat,sha256=Ghc1MeHAXi16RC3ZKebD5d2woK4nAJQ65f4BkCBO4VQ,9240
babel/locale-data/es_BO.dat,sha256=aQDwHqAMdFenAwv6-_r03hhEoyiDyxLZXzocEbBJfsQ,1995
babel/locale-data/es_BR.dat,sha256=QZywQ5KZyLte_fNcqbDIygsPzmDF6Ja9As1TFzHMyL0,646
babel/locale-data/es_BZ.dat,sha256=a7sxdDb9LL0tGBxsi1BDaAp6f5Uja3sCJUxhD3RHHHg,645
babel/locale-data/es_CL.dat,sha256=1EN5nTveM5IM2j8yUW8Y1-9ETVwCb8zzp4OGfLQxbko,5519
babel/locale-data/es_CO.dat,sha256=5t2IwUjWhe4ac35CmHySMf3T1h53HVwVTkU-ArphtUE,8827
babel/locale-data/es_CR.dat,sha256=2BdF9W_FF2r69u1RwZVZYo5mpZtSZ09VQXYLLZDMzzY,1840
babel/locale-data/es_CU.dat,sha256=sgvidA7XSwMT_7G8aC-Wa0ORz6uo183Wi-Kqfi6MB9Q,647
babel/locale-data/es_DO.dat,sha256=Ky4Hi1jNM_D54yZqdOHHcJo3p955IR4UqoWOcf8EoC4,4375
babel/locale-data/es_EA.dat,sha256=KsGmYEQF3RGUBwIl6cy9rrK_w6EfBFQEumG_eUjfLs4,607
babel/locale-data/es_EC.dat,sha256=QuO779SP9qI4P3ibEvXN85Vz5TNZwVawZ2xzSsX_ls4,3428
babel/locale-data/es_ES.dat,sha256=h4CfqSZjQiBcat4Ail-hSnB-m-Y_NugqeBfxIgMt4bc,644
babel/locale-data/es_GQ.dat,sha256=RQKztBLLAiTfMLf3BGg7dKptJiuAtnJnqCzDDwsNsPk,890
babel/locale-data/es_GT.dat,sha256=9AynkT_3-f1NgQwjHLhDBuTdk1g-fEk6L-lgByM9UzM,5401
babel/locale-data/es_HN.dat,sha256=HG_KSm5wCWirqFgpkqsblYDIP-DvG8uXCJl5SerlPB4,3630
babel/locale-data/es_IC.dat,sha256=CVkbqGdI9IV5YLwQm98JmeV_j9aNVBVXfybiAfUllTM,607
babel/locale-data/es_MX.dat,sha256=RtDHQqHZ0k4x6swSZzqEBAMz1H0dDvg1dbp6x3c84bc,30227
babel/locale-data/es_NI.dat,sha256=PoUvRbw15FFGGMSXH1UcGAMFDE56pUhCwqMy-OQJONI,1774
babel/locale-data/es_PA.dat,sha256=_FLAiUSNiqSMOo_4Rie9jUQbuRyPvWayhasf0h2offg,4038
babel/locale-data/es_PE.dat,sha256=J7BX7ZMyKLKaoZqdnaJ6ZhEE8IobYAgdbr3cdKdmZJo,4997
babel/locale-data/es_PH.dat,sha256=GjEpZddkkAk4djW5BGTdAbNB3uNLxyF9gRI0Bq9TJ3I,1223
babel/locale-data/es_PR.dat,sha256=r1ZKqoFKiRSgmnMsQqmrUg6YJE6mIQ7X9XgpE0G40Ys,3899
babel/locale-data/es_PY.dat,sha256=JbyP6zYSw27ITxC1cnjOG9GdF9uyvYkvTHN7Tu0a7-4,5696
babel/locale-data/es_SV.dat,sha256=tjtwln9QSey5X6pYLlGnjTAhqHr-qWB0QpG4w2njsAE,1482
babel/locale-data/es_US.dat,sha256=VrgZvQX1GgetFpYpHvckiQYogVWRwjc2M8D-7TxZv2Y,24121
babel/locale-data/es_UY.dat,sha256=tim90leiltAPHB6PL4tKlPxtRdOQeyGdZkmhcX3Mt_k,2620
babel/locale-data/es_VE.dat,sha256=zoBdAd14aue2QuyRO3mezQrhE9cxwcBf9NKlk7ztd2A,3869
babel/locale-data/et.dat,sha256=24aCVLFHNppuQ0X_vnBr4OVFcmWZe5dupyyxcfTEe-I,196651
babel/locale-data/et_EE.dat,sha256=UwAPPde8mci_crGM2PS2J8THqUklf4iLC0LNkJfhIm0,644
babel/locale-data/eu.dat,sha256=WVuvurT0swPV-ulsfGd2jOpdLnXcbeDs2Ffwf8Q-ulU,173710
babel/locale-data/eu_ES.dat,sha256=e2mycbz_gicbFO2NxOroBar1paVtVks6rbKU0DQk6hQ,644
babel/locale-data/ewo.dat,sha256=CVDmRkKDgI98Ivojg5oKXKmA7nVSweqZwBOXA8GhMe8,17648
babel/locale-data/ewo_CM.dat,sha256=yI-aZ1OfnkLk61VJZjwNOTD7D2vMJtMSLKsd00E59uw,627
babel/locale-data/fa.dat,sha256=FKYp2fb-CSGnNTlCC3A7rMbyzhsLtRVuNKD5P26dhFo,209713
babel/locale-data/fa_AF.dat,sha256=NaSQ96MgGR5EB_RrJuP7gFiVrb6loUYEMYcypy7zyoI,10921
babel/locale-data/fa_IR.dat,sha256=_6JotyeGMhpci4hEoWxCqlzsc95z74XWfNvGlGLtWLU,669
babel/locale-data/ff.dat,sha256=xPo7THascqSO1Rwq2MGxSCutINq4u6E0KdHNA1auG-o,16137
babel/locale-data/ff_Latn.dat,sha256=qOic-JuCdeeklvJgvz_ic-aw9d3EsppDxyFQm7wj0mM,857
babel/locale-data/ff_Latn_BF.dat,sha256=TBLGrbWzDpgdEEYFKQkpZQkv2I0K51gAkxkurqqohVg,607
babel/locale-data/ff_Latn_CM.dat,sha256=ZQcoR9AgeNUiLmlOJpSHw3Yd5sbahiRIaB2lsQkOsWw,626
babel/locale-data/ff_Latn_GH.dat,sha256=7DSLmQiwlps_cxa7MaALYb7DX_AVYlGnu15UqDtApyw,1206
babel/locale-data/ff_Latn_GM.dat,sha256=xbEOy_TAIY-LSlRkH64Vsg9xfbKVrCkmwamam8Z3xds,1202
babel/locale-data/ff_Latn_GN.dat,sha256=ULYUzwcPqC_gimeYGNJH_j-bIOlzZ20uoedKbXB6jxc,627
babel/locale-data/ff_Latn_GW.dat,sha256=lTepLAkz6EAUlbB21jVvqGFUKf5JtLzxaOViEFlwj9o,607
babel/locale-data/ff_Latn_LR.dat,sha256=Lw-PTOI2dHUkdIDKR0M2QaJCGyI0MUHJK79r_khRokA,1202
babel/locale-data/ff_Latn_MR.dat,sha256=taaeZq7jyy_js3p3c0Yu5UNx4qqUTZ6zMRNowaqodLY,1203
babel/locale-data/ff_Latn_NE.dat,sha256=iRB6Y3Yvt1tzteou4XSPNImRDVwE9N89BZ21Y8n9A1U,607
babel/locale-data/ff_Latn_NG.dat,sha256=iYPaPREiLDN1GaFfnhAPlFgOIMeGiUBcESuCiffDGnI,628
babel/locale-data/ff_Latn_SL.dat,sha256=_u93myYrYvpfjoKunJyec1UR0N_sLUQwHftaG9RPOns,1203
babel/locale-data/ff_Latn_SN.dat,sha256=-2Hxjt9xNLa_654UcxzMMv1KwUzUhE1wKPyg6jJ-cJU,607
babel/locale-data/fi.dat,sha256=czptLtqLLwRaT87E8j4viQYquDALQk_dO4Z38WvE0Yk,220945
babel/locale-data/fi_FI.dat,sha256=MGv__27w2XhG65cGxiclFzXkB_D_mtD80vGGsy6nGHQ,644
babel/locale-data/fil.dat,sha256=BOUOPdsMPhiofGhRVZanQzT3o3b8bK3V-bPqJ3tMKAo,172841
babel/locale-data/fil_PH.dat,sha256=MTbJNbhtGJjMyeNVaDc06RxfS4iJax53eUzWVERHSzk,627
babel/locale-data/fo.dat,sha256=kCxt3Qo3lYS5H-DL_DHwQOsIYEqOkgs_GtQQ44Cozv0,157255
babel/locale-data/fo_DK.dat,sha256=lmV9_xmi7w1HrXr5Yw7buNhw8hkF5DuWjQByYKIFm64,665
babel/locale-data/fo_FO.dat,sha256=1YHlmzgNj00QkxBql37zGh2Xk59lIuEbf4AKb7eTPSk,644
babel/locale-data/fr.dat,sha256=OGxp5sUu9_y5IP-Ku2J-5FDGvkpVbTqOGgQ36ZXt7PY,219084
babel/locale-data/fr_BE.dat,sha256=XvJOf1Tlc0LpQ3-hhQMfSC_XiiGOSR4P-bHQPtcboBM,1272
babel/locale-data/fr_BF.dat,sha256=Yg6rGyEXEZDbXdN1s2EmVqgVKAzdkaSRsWYbbF9QIW4,607
babel/locale-data/fr_BI.dat,sha256=QTAzACKRrDjWBIvPnuh-S7pKSgQ0-EaPibEA9bnw-2Y,628
babel/locale-data/fr_BJ.dat,sha256=84mKliQXBjf3pNe-WKeZERB1Bcdtxy-SZL8uMHZal_c,607
babel/locale-data/fr_BL.dat,sha256=Ka59fV0FRq0JzEeNzq-St-Mf848YNmOCCKEpLp4EX5g,607
babel/locale-data/fr_CA.dat,sha256=J6ijg4KmEIHf2UergLrNzEP8TdL7s-7iYK188NvccSg,66182
babel/locale-data/fr_CD.dat,sha256=iqZnRL-wo8a8aVUWQElzQt2-Gue91GXrjaxZZ5JgmJ8,1124
babel/locale-data/fr_CF.dat,sha256=RwuuRJZlNtIhH0rxZ0Q_mfYx_Z5Q3HA_D52-qMSNaUY,607
babel/locale-data/fr_CG.dat,sha256=GLdmlB2XcqX8tkO6l77gX8QUiA1-5eoQG9392mOXupI,607
babel/locale-data/fr_CH.dat,sha256=i1hobiCDrCYMXMOmTILEGR33AgD_S4Idqo_xXn7g0SY,3101
babel/locale-data/fr_CI.dat,sha256=AMmxI3EDyAk6_eB5vGbm14Z2bUL2RkkkgjOLqqYwrZA,607
babel/locale-data/fr_CM.dat,sha256=_EXjlq9zVSLhwAr9dlGsxBYYaLBuSYIJ_-i3yQy62ns,2101
babel/locale-data/fr_DJ.dat,sha256=eahykN-Lv0UwRDXuwv7-28RSO9l8kC9mOOHHXG99Ad0,1223
babel/locale-data/fr_DZ.dat,sha256=JKNF1ht8PGAL4-e60lungLzK4K20v4DGZbNQt15o3fs,1265
babel/locale-data/fr_FR.dat,sha256=a3YB7XtoKNc7Xdt-U_6EuFyVBEgFgHeZ9CvBc3Yzjx4,644
babel/locale-data/fr_GA.dat,sha256=Ps4qHm0s51rRNwpSw2YbSexqC7sDYMfyu0zL0iJc6b8,607
babel/locale-data/fr_GF.dat,sha256=F8q_LQGRmooV6LxNIHDfnG3oDXXGnYYopP-ujwDSizw,710
babel/locale-data/fr_GN.dat,sha256=pq1ao_Medwq07tp6P5AtqZPlrBzqtIA2R6ywLllJyQY,627
babel/locale-data/fr_GP.dat,sha256=weJ00L82hKf6xOGLC61Tij1VJ5H3bvN0dnSUEUV27Pg,644
babel/locale-data/fr_GQ.dat,sha256=KwtyyIOmimlf-5-PEfzlJsztaM0CXx13IDZ68jdBYmo,607
babel/locale-data/fr_HT.dat,sha256=JcnL7Nkbmlcm2GsiQIxnEcrB5VoeHuw0i9Io3ujI640,1891
babel/locale-data/fr_KM.dat,sha256=wAFOqIYkxtC8fubh9W9hKEOw89O2qmIWGH-z8m4zC3Y,627
babel/locale-data/fr_LU.dat,sha256=X48QX-y6C5HuwtOvBCYjYbG153us51yvLE66MTYk0g8,705
babel/locale-data/fr_MA.dat,sha256=HAKjUmYRtiYg5bPuxpmWOiETyHSM2e3F0MJKyF7clRo,1295
babel/locale-data/fr_MC.dat,sha256=xg-cqOKfvudfHpU0YEWJwKBYdPUUCyf04623NkzlPIA,644
babel/locale-data/fr_MF.dat,sha256=y7K9sxuz2cfiM5mxj5HP3vUrFwUvxdNqhNXBmf7IrhQ,607
babel/locale-data/fr_MG.dat,sha256=Ie_c98hiYNsbSsj3XgSZBv8vBsDJvGhPXD7cBBtzSSY,627
babel/locale-data/fr_ML.dat,sha256=xtckB_DZJRbd9j-aekggaDo2wtG4K4Q2MeG3GBupvPM,1144
babel/locale-data/fr_MQ.dat,sha256=taQ9FDzbIrvD6LgkraEmPVD_Un0unsJXkgkU1rH_718,644
babel/locale-data/fr_MR.dat,sha256=PbXQYUYZlFU8URmI8OeZ6O7SAZZOx_TCL2bxs_ilAxI,1203
babel/locale-data/fr_MU.dat,sha256=8fELzIJhgczk9JucdI1IMikCSnL5k8LCk1dS68q6opg,627
babel/locale-data/fr_NC.dat,sha256=uMR2yxfoIpxTTmeGka9y7p0hnL3ch6F24SBvbDLrl8s,607
babel/locale-data/fr_NE.dat,sha256=Qx4bREydf4aupR9SU7ZZt72aoVBHzDSdQs2bWnDTa_Y,607
babel/locale-data/fr_PF.dat,sha256=nvJ7u7lByAKHmmz4HghF9_RaL5f3r2HF4YRyOMl8AKM,607
babel/locale-data/fr_PM.dat,sha256=uorRfZ9XXMmL3UebWphyYLgmkIQaKgHBQfR2YDbKwWU,607
babel/locale-data/fr_RE.dat,sha256=biR9VqHmRXkXuNVbp62tu5m0e47UOupW8xLHCseLfzs,1160
babel/locale-data/fr_RW.dat,sha256=ya3hnx8I_04vbYN8UolfgHuDGYsxIMyIcC99i6hxgYI,627
babel/locale-data/fr_SC.dat,sha256=_l7VQ0RTuCTNdRpKIb4_59iTeU5ACXYRTQ6MTrg2CHM,627
babel/locale-data/fr_SN.dat,sha256=vUOjpMoha0OtOvwn70YnaafSUc_KwL2iwgQzeYWjH6o,1295
babel/locale-data/fr_SY.dat,sha256=ue9tpydvd35pkJSa3spqtBLRhVNesB8cUHGsaV6_xhk,1265
babel/locale-data/fr_TD.dat,sha256=txJhV9BQyT0mzhyO--I5U4Tg7aFJybP1mIFWGCnY9i8,1183
babel/locale-data/fr_TG.dat,sha256=6wDKXhxKopznkvtYoOrdWSnoyG6uFIZBMfsVBaicOPI,607
babel/locale-data/fr_TN.dat,sha256=Ezx2vA1loQOWkUJ7J4AJp9kkNOG1LZtht3aFo5RpelQ,1203
babel/locale-data/fr_VU.dat,sha256=ZgQ6sA467JMwLeFluaFNu0K371_nB6U60xoVu8f5-Ag,1203
babel/locale-data/fr_WF.dat,sha256=3cX5YsVQSnIWvtZUfWDvmS6j1ROn5n2y7NQhaMVd2BA,607
babel/locale-data/fr_YT.dat,sha256=PlyHp0evijlgZNqwlqzGA3Vd5Sg-8YqAshn0qa2DOjk,607
babel/locale-data/fur.dat,sha256=l7MnjsGyf6paBvhc2ptmAzs76ozv3dFl9ZOgKxsF5uE,35099
babel/locale-data/fur_IT.dat,sha256=ejwvN9cxtQZwBkyXQAZpNYUzYFysgFxVgvXVKclQJRc,645
babel/locale-data/fy.dat,sha256=G_LaZPkRr04wFmJ9kGuI_AeG52QymjLrPEVwbooJfWQ,110136
babel/locale-data/fy_NL.dat,sha256=nhxKtw7-G25m_7b7tSRqOJnimegsXUPL4HUh-5A1ZaE,644
babel/locale-data/ga.dat,sha256=pOfrnacrPyVVIj2mIVQ3Zx3xuLpsfiyQ5MkpQrcySQE,315363
babel/locale-data/ga_IE.dat,sha256=EnRVPIh4VUjDfkiB9s02m2TP3vDnBaHlmKPgfW95AbI,644
babel/locale-data/gd.dat,sha256=F7tTnoJpF_v73ELahGQKGS8JxkYOErOa_rRsoeFp7wY,286062
babel/locale-data/gd_GB.dat,sha256=U0i4ctdbsfL5CoppA4cD5B98vSfsfz0liAAGQ0A81bY,644
babel/locale-data/gl.dat,sha256=wD6yvNvveYtPHMlxgnPDol4n2w2sJxeE3nBJTLZZrT8,171486
babel/locale-data/gl_ES.dat,sha256=QnkGnjNmjUPyyrlCJH_ERU8d2R7hoxwaOjgAnDsrr4k,644
babel/locale-data/gsw.dat,sha256=qggIqARI0NVgVe3t1I6BqiufNHifSpVwZZKuEvr4Jh0,108107
babel/locale-data/gsw_CH.dat,sha256=MYXozdZ0H_TO_tLGex3-9jMtxwA_ggC67YOtIL7HVP4,645
babel/locale-data/gsw_FR.dat,sha256=Tqt7-0qu6jwWgmUJqpE-IlXVdxJWwSzl0CL5fFjvsrU,645
babel/locale-data/gsw_LI.dat,sha256=z97gmIurjhls6p3KuVQaEjtVhKMN3Gb1N6DZFp4Z2X8,645
babel/locale-data/gu.dat,sha256=EPgHFPlwI-8galuxjZfM_SKN30m5FxFNuHs94u0DN-8,240477
babel/locale-data/gu_IN.dat,sha256=IKkO1dxbal-2z4ryCnkkfqnIq9ODz7mkcIA1qTMHqz0,649
babel/locale-data/guz.dat,sha256=p_lLOLBUf11NUyX3s_ko99ZWvzznAfoLt8eZzfHzNVA,16068
babel/locale-data/guz_KE.dat,sha256=mZPi0_BX7npx8EdqhpWJind4kbNhw7OYAAqu7lAo-H0,627
babel/locale-data/gv.dat,sha256=0Dl1uBLCFLB7Az64CxnnqE4q8LIfSS0aPeAce-Vkxg8,4167
babel/locale-data/gv_IM.dat,sha256=PABxweL3RYxPq4l55rn6LROd-5DVwKZ0pysu1Fuc6nI,625
babel/locale-data/ha.dat,sha256=Qoz8fQprv6EosR4QwGBn7fWFrhclPtxFWuht4SWYpes,48450
babel/locale-data/ha_GH.dat,sha256=w8NWdFj6TIKsC8eo2kWorNwgLj4sZYYi56ufkWfzySw,1206
babel/locale-data/ha_NE.dat,sha256=J2YetP7_CacIyuOkdRf6iiqsJ5u7RdLH5n_4UHFUyUc,48142
babel/locale-data/ha_NG.dat,sha256=jlzeaMkYJFe_hIpCbfJIZn98hJWUh7EAroefrGANiqg,607
babel/locale-data/haw.dat,sha256=tXRuf2DBns0_OvMJt1wNoQkknnKXG97hHLqo1ixldzw,16127
babel/locale-data/haw_US.dat,sha256=oOE5jDDNwZpoRIAsW9IH9z8O4dJvDkJGBcrhxqclTag,645
babel/locale-data/he.dat,sha256=oz7v2LgGDOqaDQM7PX6N_YVZwKS-o1wu1GcrhKJ7wJU,261916
babel/locale-data/he_IL.dat,sha256=t5wXL5OtYL02_veNJ6q8Jjl6tO_rvngx-k-ET_8Rke4,669
babel/locale-data/hi.dat,sha256=qN2bvRPesEdxGLGXCXGGs8kXyqZFaFOs-OtOcwqtfK0,236155
babel/locale-data/hi_IN.dat,sha256=_E26Mtk63x0S_sHBIDa0OH2l-671oKocLyKNbyOxhts,649
babel/locale-data/hr.dat,sha256=XidD6n4-tSsjxwUu0Du6XuQmxSuY4EoH6FwOkIHzuoA,219476
babel/locale-data/hr_BA.dat,sha256=vXmTmPdhw_3UK7Uq-DtS69o4hV1p9qh_ATXztcz-edI,1179
babel/locale-data/hr_HR.dat,sha256=mOAx3MaOIHtwTdxBS2LqjvttQODxHsqDlUHHFolsE2Q,626
babel/locale-data/hsb.dat,sha256=0WoqoPYp46ku2fxzcewLkyXAA3sjnCwkwwd59DKAAMU,179147
babel/locale-data/hsb_DE.dat,sha256=AD3yaZypLO6rR-ljF8F3NnVJxCYYCxL9WTMUpagn6Qg,645
babel/locale-data/hu.dat,sha256=bokUuxUQyJkPGvtaEesVa3BVOS0WZ6ZLREN8lb0YtNs,190503
babel/locale-data/hu_HU.dat,sha256=HzJJfSglsxs-2KNMktuKpZ9yOMa6bfQ7eudxGh0orHk,644
babel/locale-data/hy.dat,sha256=USOsG-90IoRXB-uvsQcCvwFskJS9MIuRcQ6ZYcO2tB4,207877
babel/locale-data/hy_AM.dat,sha256=E4cN87TIR1DzbY8ih_H4bv-s6sC0EuAO2F1l3I7xKt4,626
babel/locale-data/ia.dat,sha256=AbRm3U78keo7BHpisZrZhxe9zH3Ss846DrslrRo52z0,112799
babel/locale-data/ia_001.dat,sha256=hlAa0qtQHkLi2GpqHmsEyROZth9cXgmAH1-4X3dGlR8,932
babel/locale-data/id.dat,sha256=7FGnheeRztRFIuqveQHXi--GtIBJX5eGHZ3DiFEc9VQ,159525
babel/locale-data/id_ID.dat,sha256=89br-9nXjDl_YMYlmEZsXntLYg3zVvhnWhPW7I8TsK4,626
babel/locale-data/ig.dat,sha256=FJX1bguZTb9nzeGLvV0-rCCPMSJD4zNBB4vRovFhnDk,19576
babel/locale-data/ig_NG.dat,sha256=zLSkkEBmGK6ao-gQHmpoLw0QGnpk9AIfpHuwReF436U,607
babel/locale-data/ii.dat,sha256=jqN5Jw5vYev2U5prUjk7W8b2ZDcEqfRWFY3F6TRX03A,12609
babel/locale-data/ii_CN.dat,sha256=kSOyQhGFX7GGD9QbOTiQ03AIwHKTlPiAA79bhpnd3RA,626
babel/locale-data/is.dat,sha256=2wQuZCiLU8LVDm_etN6Q0Sb2uUgj1906stigLji9lvk,184858
babel/locale-data/is_IS.dat,sha256=ggPurORP9YiNltg50UxSvBn-IxSLQkh1Z7vfuAIat2Q,644
babel/locale-data/it.dat,sha256=kZer8Sb4ged3pz5ZqH0NxxTblZGQih9UTQMKdzjHjt4,186750
babel/locale-data/it_CH.dat,sha256=x6SYxtwSWqZ7uK-WH7KnW9JQXFt7vjDVWLeRdxNMOX0,2794
babel/locale-data/it_IT.dat,sha256=69HQXaQ0Hwq5FyY_HFQer2_TaNRK1cBkfwcQoOgD3Q4,644
babel/locale-data/it_SM.dat,sha256=-_kXYm4nypjxW8slleVL6npDk4swP0VuVVyDSZzusQA,644
babel/locale-data/it_VA.dat,sha256=bRLH_PNEqsBmiEeJ4M2QrT7m-lxfJ5a0mtzpOm4wUnY,644
babel/locale-data/ja.dat,sha256=44T77KPAAaPXTo2vWGPev9w8uSgBjuxH0NdE3Rw6zV0,195480
babel/locale-data/ja_JP.dat,sha256=_CcStROLo6tiJ1AUoPpL9-zhxd3dWAcgH4UxcKW5p5k,626
babel/locale-data/jgo.dat,sha256=JHgE9Kgm8gcKZXkRFnBOZL8SWmVefD76VPN1EM660yQ,12649
babel/locale-data/jgo_CM.dat,sha256=EA-EL_vITEcScepi8XtIZp9SfEo8yNtP8mjMDRmurrk,627
babel/locale-data/jmc.dat,sha256=ACdBwmyvLhD-wfTDx5C9BkKiLgAwYi7MXIZjWdnqDs4,16120
babel/locale-data/jmc_TZ.dat,sha256=YEigUlJSIsg-2I3KAGmE_7yjVxXeavgHrzATPawzjZ0,608
babel/locale-data/jv.dat,sha256=bugx3sMto8K-q6UzIdI9_ojxBU4freLUOFll-ZETNrk,127658
babel/locale-data/jv_ID.dat,sha256=jqAFeLVxdIWZRCImM11zmQxFVN-dTNSHqkJjMd0fL9Q,626
babel/locale-data/ka.dat,sha256=PMXkpUZaQ_RakmWrSOapK6BVvijldgDvMO5S5sBzZBM,256694
babel/locale-data/ka_GE.dat,sha256=Nexqnu44i7cXrHB0jlI_229QKiNr_k1TSvCRgb_Zs14,626
babel/locale-data/kab.dat,sha256=Y9PhkPEATNfpisfItTMvOrgnz4QOSJKONa2Xiz4u59k,134995
babel/locale-data/kab_DZ.dat,sha256=7hA6qIdD8G5tnw61QlzIlnS5U__261v3XMZT2Wr93X4,670
babel/locale-data/kam.dat,sha256=bcQ2hEj3o40MkIoAuecyMBXwN2xCRvPoalAKBmbVDYM,16227
babel/locale-data/kam_KE.dat,sha256=1FV6VA2MmH4ZgMbVnb24DpSO3XAYfQzmKxz3z6CBAjg,627
babel/locale-data/kde.dat,sha256=f-939iTq5qzLyicPxbSOfbEd5zUzqNxVOJsIP-2sGoE,16527
babel/locale-data/kde_TZ.dat,sha256=BNkDCTvTEt2R4XEQbKwuNKe_n132hEdXd_YaYunBcL8,608
babel/locale-data/kea.dat,sha256=XBJTSBu8-1l_psgKfETjGEq29Q-l2mAd-XqgpKt01FE,71368
babel/locale-data/kea_CV.dat,sha256=vucrSfeOhAM0D2XqJADgq1aQj73Y83CxF-5dBMPjT0g,608
babel/locale-data/khq.dat,sha256=xIIW5VdIykrnI2yYkhlJzskHmJtESjaDC3GMB9PlI8g,15992
babel/locale-data/khq_ML.dat,sha256=KdvooU50jIQ2C4KfGTuRStG_55DFqJxRh5UtULTAQW4,608
babel/locale-data/ki.dat,sha256=zVuht0vDiuwqzW0ifSGnmQ0UvWm3RyyWCA9zzEFrY5M,16175
babel/locale-data/ki_KE.dat,sha256=JWVi4WSFtN_grtZ1IzyNQlSb1tzHGg0UxFV9MdcONUg,626
babel/locale-data/kk.dat,sha256=PAr_CVdN5k0hKuGNK9hz6vVVdPqcfLe03yGEL6djPr0,204884
babel/locale-data/kk_KZ.dat,sha256=XSbqJqUhzr0pVwmcdJJPydCzCawHbVeP0u4DNA_jhXE,626
babel/locale-data/kkj.dat,sha256=fNklRbLpQSyan1SPutZRhWBypx8TWYjc1j3UIvwYTwk,4907
babel/locale-data/kkj_CM.dat,sha256=zBage_7dyPY1TQoWCrfV2TPdlDLYTSAFmoj25CyvALs,627
babel/locale-data/kl.dat,sha256=ARiEanVrxmHJWpZoDHL3-5bDLLpnc9nS76f7Kp0EqBY,58262
babel/locale-data/kl_GL.dat,sha256=U8dA1dcOZa88aDh_Za4Q_cuO-QkjQON-623nTSwvzJo,607
babel/locale-data/kln.dat,sha256=0KelxA56bvhWs7VzcNIzxcJg9wg6P2bfOkZf1ksltd8,18062
babel/locale-data/kln_KE.dat,sha256=pA_rLiD4YhTe_VqEzm1jUNqr-p3oC_RW2Q2JCptdo5A,627
babel/locale-data/km.dat,sha256=I0D0wPvYR6vlyAqaPiNmS3du1xnjKYALBalmsisoGZU,199952
babel/locale-data/km_KH.dat,sha256=pJZh2Z4Gna9qJRlOE-yQUcTXYMJLxXW2GiA41f911PQ,626
babel/locale-data/kn.dat,sha256=rI2BegUtzlVb2hNxE99hBpeFtNzVHoOdhVYQsh_hllU,254243
babel/locale-data/kn_IN.dat,sha256=O1SMSp5lyz45vrJD6qZOGdUJvk8sA_pEMpfZ7GT1Hcg,649
babel/locale-data/ko.dat,sha256=3ewk1I7Tm2zL68NIomcXz-j08yxgg_sILYHgKkn46n8,168189
babel/locale-data/ko_KP.dat,sha256=Irg8tFOdTqkLtdsJK-whgWIl6CYpzQP5RQSBXFt5lzA,807
babel/locale-data/ko_KR.dat,sha256=iWEE9J7ywo-4HLyDz5nRWPCIBwxMXN-g8ZVluEgP254,626
babel/locale-data/kok.dat,sha256=rzStfhkGwa7NIRzhCOntT-evQiacjkBTH3I7F24Pc8A,82765
babel/locale-data/kok_IN.dat,sha256=RxPYjH26AlDPH57oj9jYulR-b4PZST3ejoheHedo2Bw,650
babel/locale-data/ks.dat,sha256=P0rhXLRGAZk9ltGxyomMfGhleHVGmt0Ah_mWMYTpkbA,102488
babel/locale-data/ks_IN.dat,sha256=FbxiVs_-h7d9TExduoM5qzHoWM6j60ymcgK1_2sjGKg,649
babel/locale-data/ksb.dat,sha256=R8_UFjgnrqwE8sBhSdvxmmE3pjBkNJKSbjW0xvc4eQM,16095
babel/locale-data/ksb_TZ.dat,sha256=P6Ek580ObjaXqb-Niin4LoZm-eBCTCZ_NDULLzMeC8Q,608
babel/locale-data/ksf.dat,sha256=XAtIX8SXZvHlRlMgRfFKFDaQlnVN1AuK2LjxTWQY260,16569
babel/locale-data/ksf_CM.dat,sha256=_uUJd2qMvazQrVHTPOtyXTDlMvbuoISj1F1s4EH0DOc,627
babel/locale-data/ksh.dat,sha256=7CaMQbLiUj_7eMvTq8m0mFmYr7kHfthLgCZerVssnNo,88940
babel/locale-data/ksh_DE.dat,sha256=VH8heoZOq8IaxYfsA9eB4CBxb_M1bSvV4pdJ8Z2E-EQ,645
babel/locale-data/ku.dat,sha256=Dw_Ht0WncxRqQySXLb0an4q7FaRG4oomDNNzGp3XxWY,25250
babel/locale-data/ku_TR.dat,sha256=M_qtiMFC7haUg8UTe3AZ1i-uMY8hzxjtpUgLM2kEYA0,626
babel/locale-data/kw.dat,sha256=xYQtQdmpCyUVtWGhBb714h-X_IK6ImM_0yPXyYerZJ0,7263
babel/locale-data/kw_GB.dat,sha256=-Xb3hs90VWg54TdwSerXDq3lxuOc-Y_wMyfdt9i-agA,644
babel/locale-data/ky.dat,sha256=xnyKiVz_OQ00SVC0ab6tF2lsjZOoLDXI9vKYaGKtcKc,198490
babel/locale-data/ky_KG.dat,sha256=KjjmZqSgzsCGALW0y1fiURGMVa7qmafuit7QKHfJEPk,626
babel/locale-data/lag.dat,sha256=OZ2DjZQ-J3Gn8NWaF8byI4KmSO0gxo6ynBR63oK89rI,17194
babel/locale-data/lag_TZ.dat,sha256=SCwIMT10n8D1ekVMqg6nAcRXYq6MJXNeWOsvVX8IkOw,608
babel/locale-data/lb.dat,sha256=FhQHzrbTMvwFSRCjlRF6Y6FRqDKKiXqhtJiP2x-sJMA,164416
babel/locale-data/lb_LU.dat,sha256=olHjVPCdBZkqm868qqx4i65Wwx6AOPe3lINW6aJAbeA,644
babel/locale-data/lg.dat,sha256=Tk3WfKfVjXIw1gCFo5hlyNuxhuqYdGe5a2jkjPDVYyU,16487
babel/locale-data/lg_UG.dat,sha256=qCr6389TVehgSTUVXvzatoLF1e022IThG7RjKuJpACE,607
babel/locale-data/lkt.dat,sha256=Av8k434perqFI_6BRP6EPkvchiPN_nO2CklTNVE80FQ,12787
babel/locale-data/lkt_US.dat,sha256=Ri8yk6RWC_94pO6kLYqlpBv03WiLu1rRWfmaNDI6IOY,645
babel/locale-data/ln.dat,sha256=yQJCBmMpVevkIOBOUGvrMkp_QhqUwdk7inbzdAEAdBY,25944
babel/locale-data/ln_AO.dat,sha256=gEc6KBMCLUvrJkXrbO9QBF21m4dzmbA30ThJTD0IhLo,627
babel/locale-data/ln_CD.dat,sha256=wd8KeWpyBgRxFdZOzPRq6rPq8E2BhpWr9-6sZpT4VuM,607
babel/locale-data/ln_CF.dat,sha256=Uz79NMUs8cX3JzPyKMf4u9_byQhjbl1wSGZsb0atVI4,607
babel/locale-data/ln_CG.dat,sha256=9Fq944Hcd7zuj_TDXg8RiHhLVlhxrchJSSD1TwsImbE,607
babel/locale-data/lo.dat,sha256=gDnF_vrRlupEleLl6cLJAjSHOQrYZ_LXQgllbgc6hmI,217931
babel/locale-data/lo_LA.dat,sha256=gWqRr8oGNYTNVtYlY9w5uqi9JSgzDujHxvaJZW1V3T4,626
babel/locale-data/lrc.dat,sha256=YdYWaAoqdqGpAmmjgdnIWYdVSVT-swZUtdX_3TOSBwU,19108
babel/locale-data/lrc_IQ.dat,sha256=mjDNrH0bJqU2uNIPr4W13ychqJLfa89zSoHyAflcM50,1246
babel/locale-data/lrc_IR.dat,sha256=htOM82RqZN8DsREw0zziwiwEf1Q2wRwzN4O1EPSUucs,670
babel/locale-data/lt.dat,sha256=iVizf8vz2hHKOs3M_WJsRUecMBMtgfJMAvXYWnBYdjc,281388
babel/locale-data/lt_LT.dat,sha256=xcQ9WNnIdoq-pTrJDlo6J2SKztAPEXxOK8xN09nbqsQ,644
babel/locale-data/lu.dat,sha256=iaOgjHXhBbxqkaMZfnIjZ9yE_ivOogLi_pIltMav4DU,15940
babel/locale-data/lu_CD.dat,sha256=kt1Ck2JozYYmkDCjbBbDR4U1PqKsmVHCQQqQViFWoDE,607
babel/locale-data/luo.dat,sha256=OMmTjOcNBykKCDRQkrlMnvTXd4TTWph1KNFEYeOp5BQ,15937
babel/locale-data/luo_KE.dat,sha256=mkNhvC4s9Ay-_oOw0UAbozSEtyhwktZb4IEXqHb7R6E,627
babel/locale-data/luy.dat,sha256=AivHUzk0jfpYWCd9XP-Vgn5SplZZ3Ruk7rVJTXhEqoY,15911
babel/locale-data/luy_KE.dat,sha256=GQ1Tripph0Kv8EWkDmbUkBoGNRu_Z6_nLraREYDOJIg,627
babel/locale-data/lv.dat,sha256=aYbZgH9misbuEu8xd2OPukaAcwmqJZkQjMB9I2TRq60,211983
babel/locale-data/lv_LV.dat,sha256=RTXhOKiDi25vJqvlFjMfx4ipS6IAz9NOsPfvqDA1xHw,626
babel/locale-data/mas.dat,sha256=GrT-spd-MlSJf_rY-oBo322W_GHL4BGR5-b9WAWhIJw,17349
babel/locale-data/mas_KE.dat,sha256=dvwGyYsRBTACJmRkHe9OqHdBcE7-_3JETAE5DcZxyOw,627
babel/locale-data/mas_TZ.dat,sha256=_awVxs_AqCJK_nFGOWUV8CfM0wkBTOyTxp1wf2wx38I,629
babel/locale-data/mer.dat,sha256=dfZLAHiehC0aQF-BXQRnSXnJlOkC0R5HU8uMpPqBMDk,16140
babel/locale-data/mer_KE.dat,sha256=VAEMZzP_zylJ_jGnwsvys8iwOWMxpnFD2FM73GEhVFg,627
babel/locale-data/mfe.dat,sha256=f7Hwcx7ufQhlyaXWkshVNcgbh6RbSWBa_YtQhy2nXDo,15169
babel/locale-data/mfe_MU.dat,sha256=fXUaGJO-Fk72_qgRVSQXN_4lCMNiOfLbtcrEMoc3Z-E,608
babel/locale-data/mg.dat,sha256=7bWB6HH3HhS2boHUg3Fk6mCT0hTOz3Ev3X9KfXZbEMA,23602
babel/locale-data/mg_MG.dat,sha256=Nvn9k4UDSq5ryL5pXMItBWimjPbby559_heRzkodQOQ,607
babel/locale-data/mgh.dat,sha256=sBxM1pG-ctAdiJ9hl4Hag3hN31bkQBRC2t6R7YIqnF8,10532
babel/locale-data/mgh_MZ.dat,sha256=bj3fkT73aicgmEW0wNj8Tl1fMg2raeXoMaC3u-CobtM,627
babel/locale-data/mgo.dat,sha256=1YpgmLHiJWcmqQvNcxpOIfKfOQ1etEAMDxkQi-psLm0,8266
babel/locale-data/mgo_CM.dat,sha256=PF1Fm-f95o7Lu9qG5FhDJCaHBmXmV1yCuacaLuiJcUY,627
babel/locale-data/mi.dat,sha256=P7-TssxweXKOKPlQy2VcGNMmiFVVVe7LP3hv6aWtiOk,19166
babel/locale-data/mi_NZ.dat,sha256=d_K1LO5OIXVG8bHH1WSIVFWt7mqjxrk9T1FUY7Kr3ko,626
babel/locale-data/mk.dat,sha256=n5yeB-nRq2EjOqcfESsgXiuuspwby7SS1l79DazdC3g,230242
babel/locale-data/mk_MK.dat,sha256=GT9GwufrdgHTNvJlilW5X73eWe7x8nPNu0orikX-q6s,626
babel/locale-data/ml.dat,sha256=TEf5l5IQe9_eEMqM81zevTtjxYnTU4ri4xdIrUIUfas,282791
babel/locale-data/ml_IN.dat,sha256=v0Q-8P2QVsISoVj0c7_SPRfv20HgUV7UpMkkyDXbr_4,649
babel/locale-data/mn.dat,sha256=bRv5302YnTCpr3_mHntO_5niulNYjgAIpzQbE1gyA4U,196664
babel/locale-data/mn_MN.dat,sha256=BU1qpiwX_J5qR1sdIZzJWIWhA2vVo4R85xQCMO53zeA,626
babel/locale-data/mr.dat,sha256=pTtl6V-f6FrjTSAdZRDI6uxb83NjwdyrOswfplw6GKU,242765
babel/locale-data/mr_IN.dat,sha256=aef7VFbvFenw6pJg7ZDW9vV52b9PENWqbIZ3ttetPiU,649
babel/locale-data/ms.dat,sha256=qTclc9o5XP5CTlDz3RDep-QXIKYCjSKcseELT9H53XQ,141460
babel/locale-data/ms_BN.dat,sha256=nmT_ZUqySFofHKPBOeB79OXLF0_rN9_1l7Ene73Tovs,1275
babel/locale-data/ms_MY.dat,sha256=V8RAiIYkRQeK0zZmGrMX_kYD81Y2jdyK8UkwWficNrw,626
babel/locale-data/ms_SG.dat,sha256=fN3UCsrV9k3HFZuIlgDV1Zx84C9EVpyOcGmy8HTM9VY,645
babel/locale-data/mt.dat,sha256=NvIc3Em2gHEQOmJNNyG4BlPFKayvF1oWOiXTyJKMfUI,99982
babel/locale-data/mt_MT.dat,sha256=XbWtjBItwBOzQ9dfXffkG7ip2cQtJUr_uOLXc3nP-Y8,626
babel/locale-data/mua.dat,sha256=5u-bhk5Yqn4Ol-rPRUa440aVFzOb-ehzlYykaVALBQ4,16624
babel/locale-data/mua_CM.dat,sha256=jU0ODX7XWhTxSHF4Zr1r6qX4F6GMTIhfojNXPlQeVzU,627
babel/locale-data/my.dat,sha256=0UdwyYsfpnde3_TNEl0ayDs42Zgg-QrRyuLehT1EJ9I,207004
babel/locale-data/my_MM.dat,sha256=Bzi2JR91HdxPj-Z0eaOLHk8UK7tTh3YVb65GdioGkgY,626
babel/locale-data/mzn.dat,sha256=vwdqMcpOLkqkInioKWteaAVTJj-zYLNNI27VcZA6S30,65508
babel/locale-data/mzn_IR.dat,sha256=Sx-axN08t4qUK6gkdZJI9gE_od94O9yBOl7R-xydR74,670
babel/locale-data/naq.dat,sha256=vBYqqBMEGqzIQx5SW1A_P3rbt7IAQAHBaBFc6GpSYWM,16670
babel/locale-data/naq_NA.dat,sha256=SVEYeRqX3deeAluOQ37gwHfm5cGLcPwiNtIKsGLaRlo,608
babel/locale-data/nb.dat,sha256=7TBrzSoRB76QUbA2V1dSgapFQhNxRQ02oxXxdnQeZxk,206686
babel/locale-data/nb_NO.dat,sha256=La0HFi0QPvkipslkGYJWyrvR6qyZSrVf3rxyFy5MAeM,644
babel/locale-data/nb_SJ.dat,sha256=kfp9zsPvpbuegd2KdJl4WO2eppXw-ybSBDrj_5-Sepw,625
babel/locale-data/nd.dat,sha256=-x3R97oZR6KBTEa7fIudTqIzEGLVO4ErMabTlbH5cqE,16364
babel/locale-data/nd_ZW.dat,sha256=VVVYbvzg8pvKDAUDUfCOBwVjgSqR8kRNZa3OvIMolxE,626
babel/locale-data/nds.dat,sha256=3Atq1yjM2MyTIsdO0hTSNHuRa-3XrXHsctO9BXE7QA4,50653
babel/locale-data/nds_DE.dat,sha256=g4IgcJMM4s8ZDbkks7VetJ60Fv_SDd2Z7KYNtt9EwKA,645
babel/locale-data/nds_NL.dat,sha256=LwfMpT7rOaqwl7fz_0H_rnApfhJpG_WG29HY5fAW4vA,645
babel/locale-data/ne.dat,sha256=CDG36HQ3kKDZcPo1_aTwmnLqLKHg7J-tx-twaveRRCo,243069
babel/locale-data/ne_IN.dat,sha256=Ty0qfdsX3laEfDmVosUYb1nO4lbPuSwUiUklpZ4CivU,1283
babel/locale-data/ne_NP.dat,sha256=_aOsODQJSx-2Qp518aT1E7U512mda8-coqIfgRf-pjs,626
babel/locale-data/nl.dat,sha256=xb8ziI2bEfydTIXYg5xpiF4EzxyKe2_9-BHzDKpBbfw,195437
babel/locale-data/nl_AW.dat,sha256=lWaFk0qMUWo_XmeHAOHypINaoLCgBOytw5qh02PjR2g,629
babel/locale-data/nl_BE.dat,sha256=F20Z12nxs501ipOGOVYEWgoMwOM37I2l__E-nytDl9Y,1853
babel/locale-data/nl_BQ.dat,sha256=NjO4GFRK0oeyv6-hC5I5TWijntLMaQkvTTeMTxeo2go,626
babel/locale-data/nl_CW.dat,sha256=a6rSPV1TGf0Iqj4WPtNdCMturkN_acf-wTdGwTjjcmo,629
babel/locale-data/nl_NL.dat,sha256=Z5xxMZmUPdO3at7G0RRR1jvz0nNRAErbp-u7EhLRJ9Q,644
babel/locale-data/nl_SR.dat,sha256=6KsySLDzDt8H_rmCVslpGWYLln01xoKdnjkga1MgbY8,687
babel/locale-data/nl_SX.dat,sha256=ax_nqnWCIEWLBF_OAZApdqoZxHGmrrpc9OXt_2GVKTM,629
babel/locale-data/nmg.dat,sha256=JHmpf4ywpHJH5X8pM-aIQeKqXICoMuvUgH6LfvYA95U,16241
babel/locale-data/nmg_CM.dat,sha256=kfA43rC6-mmtCEQ4KahPBUyi1SLoxOm2_k557QI7Z-I,627
babel/locale-data/nn.dat,sha256=3xzcFn_sSkfJdOutUmKmMXnuNvpUbqEfsmYUCR44V3o,179805
babel/locale-data/nn_NO.dat,sha256=D9aXKaMt_90RbYBc98NvfkVZ_fYV2qjg0JgsEHKXpE4,644
babel/locale-data/nnh.dat,sha256=iJ8maFwvQWWF2z83wPNeDWCjtXJkb-6BdqWTNe8S4vU,6787
babel/locale-data/nnh_CM.dat,sha256=iCo3O7kW-Lcsr5g1Bv5bfLm8FeKXt68i6aq0Qqa1cM8,627
babel/locale-data/nus.dat,sha256=cdQBVX2H5tuWRZUTUORxThKZb8ZA79CFis8Kuetgjxo,9183
babel/locale-data/nus_SS.dat,sha256=EVZv-VLRZIXfmHowg2c8xkeZcrFC6DWKbb9aYzuykl0,608
babel/locale-data/nyn.dat,sha256=glQUHKEgHma36QtPbPimKIoFNkaJflVPo6r9zpx15eI,16328
babel/locale-data/nyn_UG.dat,sha256=m8ciJ8wJ7Ss0sZEwo7hLys0p8X9Tc3iaPAIjKEvvzak,608
babel/locale-data/om.dat,sha256=C_dYC8Os3xZXrR2l_U3HBivYw6aidjIq7ROnCYtY9Ds,16609
babel/locale-data/om_ET.dat,sha256=Og-EdTbZ_HAokrmbPFbOUuekkmh7ch2DKDZctUISASU,626
babel/locale-data/om_KE.dat,sha256=2ANK67n7vvRAps6fTVn15wa2yYnz_2yy303bozguDSo,1584
babel/locale-data/or.dat,sha256=hN4VKCkslHZrQ7ti_sU0z_NiZoiEPkYbmmg6wcKMGkw,237035
babel/locale-data/or_IN.dat,sha256=Dw9dokygCMjypNY4sndqyvOnTFsaqW7pq_UPLLHqtTY,649
babel/locale-data/os.dat,sha256=8dpK1HvfjQPT978fBRPiuk1wRtlSp4N4lcyPGbxy3lQ,17648
babel/locale-data/os_GE.dat,sha256=hSmyrOpT0abvV17Q5Myoar7UMieDCmckpszmuRQJxfQ,626
babel/locale-data/os_RU.dat,sha256=HRXdDl4XLKTYX63EpDJYq556qySfi_W9q_qzuQL3J0A,686
babel/locale-data/pa.dat,sha256=kZGBG23ACj27nI-LNf02A3Vqw_KFvk5D2lQHH0G6UXk,237090
babel/locale-data/pa_Arab.dat,sha256=Xe20lZpLm3stLXFQTLIq5PKUD7TvC4ORuMzRZiONqJA,3967
babel/locale-data/pa_Arab_PK.dat,sha256=_VUdc706YAQSu8NoUCuvHosko2l8GtwMdvTXQkuh8oY,626
babel/locale-data/pa_Guru.dat,sha256=RemwK9VZViIqmbpdFqso7S9xD55LITJbW3sewM9oRUQ,1267
babel/locale-data/pa_Guru_IN.dat,sha256=S0ctHEKJ7RxJFlvLs31Yflmn3-RwlWb_zc_gCyVYzrA,649
babel/locale-data/pl.dat,sha256=kbqTerSZmugR4oZtKNUgvAbQFeRSK3uk5QfgvD32Xk4,228149
babel/locale-data/pl_PL.dat,sha256=3aqxuKmM1y3xnRC3iG6DRzNquFCjOhSe-USlt0jKY0I,644
babel/locale-data/prg.dat,sha256=cvJ-ohjlFKFxS89iBJxru6s2seI9xnB6GLQor53PfjU,20221
babel/locale-data/prg_001.dat,sha256=1SYLtROetU-QpYhHxtQWxcY5wNmVnqFxk-J1-3BLYoI,1585
babel/locale-data/ps.dat,sha256=QNalPguMKEGOci07W4rvZ5iHYDPK_eziWOIvKT_Aq7k,142340
babel/locale-data/ps_AF.dat,sha256=SvjOBEFhyxjFIfXoaTObaS7vNyKTC5-Q1qkOetchsWw,669
babel/locale-data/ps_PK.dat,sha256=bpR4HtSC8OlQ7_J0bMbZLFV7VwPzthm93offnsFF7Vs,7979
babel/locale-data/pt.dat,sha256=lrMOGnflLfsboEb198U1WjHI--FEF92bvn5hcr2bKqI,185356
babel/locale-data/pt_AO.dat,sha256=QxTSJ_njOOr9W8_wFKMefA5_X_vLLmbTyTShIPr2TYk,1013
babel/locale-data/pt_BR.dat,sha256=QIoCEwQ4MVDEptP3uYSfuxaUg8LM6Udx_j0u5Pmsscs,626
babel/locale-data/pt_CH.dat,sha256=1vlk7Moo0YazBGXzCnhzraOQ3na7vMkhiacGprATTPo,644
babel/locale-data/pt_CV.dat,sha256=TuFHk3Ps62TlH8OXoA0I5n20WXMtJwRkTX9F-ifPPSI,1038
babel/locale-data/pt_GQ.dat,sha256=n0jtozbmyMELo3v12qZh0bLpOXkl5Yo3-SaoNQzB76o,607
babel/locale-data/pt_GW.dat,sha256=kzmvupn16Ryy-Ad021sANJicQ405NWoLR3W0hYNgEHY,993
babel/locale-data/pt_LU.dat,sha256=2PCOkLE4Fpb5rr0S2yGHzWr02NXkEU1DFq9cb3jYPEk,663
babel/locale-data/pt_MO.dat,sha256=ASQA8OF0aPTEMnnUxPgOuTbVb-Viwruz72TsvnJXhCQ,1610
babel/locale-data/pt_MZ.dat,sha256=OyyIvwlWPVXczu-1QDtFD9x3TCgbuWVmSfabMU50XIU,1033
babel/locale-data/pt_PT.dat,sha256=JY6J734UdlBda2-mpgdbopkJ29vH6yRCa3Drs8TXlLY,95387
babel/locale-data/pt_ST.dat,sha256=XFGnYoCcv12C1dIPHpxn_yWF9TO42Ze6HqHaVX6lJXs,1013
babel/locale-data/pt_TL.dat,sha256=Kjoj1JE5C_EsdKbWricnpKTUirMnSI2bVcljIDUEtXg,993
babel/locale-data/qu.dat,sha256=UUdFgTeC0zefb0AXhnHREXZ_bRxMx4L3gZERS2byRFA,63678
babel/locale-data/qu_BO.dat,sha256=VSRgVtWlTouId0GarX_ZO5CZYAY7HulYTZQzsrcKpC4,854
babel/locale-data/qu_EC.dat,sha256=_Ftb9lPp9eFysmiuzd0LXl_da6UfcDdAH9iK4_OfceM,828
babel/locale-data/qu_PE.dat,sha256=u7IelNiSOLZ2gSHzXBLQloraDKQK6rn-Gj3PDrpg7v4,626
babel/locale-data/rm.dat,sha256=6VF_YfKkH4zszgClWGpCutJLsTtPzjz8YqqIIbN0cIs,67703
babel/locale-data/rm_CH.dat,sha256=nM16gH9DyFuCWJvzmeUYDeI935TZvG9PeHlnJtwsDJ0,644
babel/locale-data/rn.dat,sha256=fM0YjSf63MqPnOGaoSAiJ8pGuNLeS89EbA1uvZffPD8,16834
babel/locale-data/rn_BI.dat,sha256=ysozkQZjd7MbdqQR0Appe1Z0pWEI4Uvt2sOjh1Bm33g,607
babel/locale-data/ro.dat,sha256=0F4YaitEpsSgSlFL-spgysyo8PD5nWzldC3JRPR5iKY,214600
babel/locale-data/ro_MD.dat,sha256=BPMaAQ0BsOZkbpcwrDCR1dAYiIpEoq-d2QttEiTxLsU,3468
babel/locale-data/ro_RO.dat,sha256=YGMdthocAJEwwzNv1M1FpXmx1npnT1o6QEj6AIUJA1g,626
babel/locale-data/rof.dat,sha256=_9WDdCvXcsispR_Jc_bd29GLdco-0vv8BGBAuM_x45c,16222
babel/locale-data/rof_TZ.dat,sha256=SSYWLiv1Okua7sYRhIAvr9NDMLo-s0R9fVOnh380Hu8,608
babel/locale-data/root.dat,sha256=3EUwOJF9fGXgbeqm_oB0m72qKAi4P2KtMhTX0tgfCno,39989
babel/locale-data/ru.dat,sha256=4CN7wom0dRoNu6rl9v1J0obCQ_hqEhFFGogsD11Wotg,297015
babel/locale-data/ru_BY.dat,sha256=lucENheWCnm6l2kFWx5iiOQ90RnN-a3SrbbpSMEH23c,667
babel/locale-data/ru_KG.dat,sha256=q9H-sCDazhmCcmFn-QlJ1eBwVnXaGmqTFZT4W-9ssHs,650
babel/locale-data/ru_KZ.dat,sha256=zOoq7g7gEqS5ScuiwnNbhFcCYXEBVQ45hoH7x-SKlAs,647
babel/locale-data/ru_MD.dat,sha256=XQc4LwXm3XLmevt8NRZEVQ5tj03YVkeeHdVb_mzH0wI,645
babel/locale-data/ru_RU.dat,sha256=UZR5Khv2hLp9BZbGcy8Fxg_luQDS8dgxQqXnmrYOGKs,644
babel/locale-data/ru_UA.dat,sha256=OVdhP1Te2SUv0a46Hp1FTLUVwBTbNyinQDy1RLLE1ko,1765
babel/locale-data/rw.dat,sha256=gah0pwvkYNXB7CP42-1B5CMFxjVNjNKBXPBjdPumHuw,16234
babel/locale-data/rw_RW.dat,sha256=lw9-N-Bp6o2kOraBdnpqbHXS9S06vRGoRnTi13_-IDE,607
babel/locale-data/rwk.dat,sha256=UK-qdX5VjbSXmZ1sWLpUggwyEbekDq1nSWMqwgYdPYw,16109
babel/locale-data/rwk_TZ.dat,sha256=qOwICQ4gh7wiNjyNsrZXTCxpEgydLb3KLC-aGSaTdiw,608
babel/locale-data/sah.dat,sha256=qTZCEQCAO_up0P4P-GCMC_Kzofutc-1oSBqmetTCvqg,47992
babel/locale-data/sah_RU.dat,sha256=fOJxjNB4yurKvi_Nf5qd_vZ6Ph0P5TreDKysCfkMkAM,645
babel/locale-data/saq.dat,sha256=_DqapaXzmUP8SkniGLNOgMtUOCixaqY-2P6V6BUbHXg,16507
babel/locale-data/saq_KE.dat,sha256=lO5cOVp83wj9tFYBb-OT_EoKnvI81obyEODz__dalok,627
babel/locale-data/sbp.dat,sha256=B8DxXa27HxQ03WmXTxDWsUPpr2Xy66X5utdQ0lTjmQo,16532
babel/locale-data/sbp_TZ.dat,sha256=Ck2aub_MmSbx5Ixgs3RxF7dJU90nUufYP7cj1eq1eeQ,608
babel/locale-data/sd.dat,sha256=TyFszAcJ_BrtUiPW3HO8I3ZCQDGJJt5lukJMGWeb1uo,188120
babel/locale-data/sd_PK.dat,sha256=tYuzmCxNr02-FV1pgkN94YAvn67EdkluBYaUTr9S3A4,626
babel/locale-data/se.dat,sha256=cWzX95vS957wbyZvvNNrhu1QzUcSBMCH1RVy7buxSsI,72355
babel/locale-data/se_FI.dat,sha256=ASUyHt_ysdjsBx4jb2V3d7TFnnCrvKrAYOwvYHSdvfE,46541
babel/locale-data/se_NO.dat,sha256=9fc96ChjeOE9FGup8pr5S_zGqirgrITKl28Qz0lHu6M,644
babel/locale-data/se_SE.dat,sha256=pVfO3I0swSWy7w8Jb0V-om95t4fWuDWB1YdE_Lr-Gf4,685
babel/locale-data/seh.dat,sha256=LsaTKr3qRHEBZWqNElUVVxIS46jCkcyaoYn0d8lSq0A,15963
babel/locale-data/seh_MZ.dat,sha256=0diTZF6N0YXrV0kLWl9RY-0JXNWQ3uEToZEbCt6fOq4,627
babel/locale-data/ses.dat,sha256=dnGrI4ChN-RlvGcNMK8pcMGNgNuDzfsx8XF7RmAhz2Y,16051
babel/locale-data/ses_ML.dat,sha256=9Ske0UaoHZbpE8jhXhk-VL3HhIhmXnsIbcNQZeO08Qk,608
babel/locale-data/sg.dat,sha256=NLfT67esLoB5WoBHWk7E0TmROwJQ6VXil5yiqzOkUag,16688
babel/locale-data/sg_CF.dat,sha256=7BfhzANJ38-LvzQu3NSxTdhJtQ4kTwcKQAkt_NOWHtM,607
babel/locale-data/shi.dat,sha256=3VVluM_KYVrNoROBAAs3xUORYRV3p9-Whq9pBS-slDw,22106
babel/locale-data/shi_Latn.dat,sha256=uJs5g6f9GlIzLrzmtgx1rjgY9xWypEsYciU0lTEbtiY,15670
babel/locale-data/shi_Latn_MA.dat,sha256=Erhb65aX0qGNECddJ7M8fKy2zjJlOxXSKx42GdQdf0A,608
babel/locale-data/shi_Tfng.dat,sha256=N4AQ70s0OZV-xQ4VWLo5bGuPPWk2AxmAjNuNwvG6bIQ,965
babel/locale-data/shi_Tfng_MA.dat,sha256=Erhb65aX0qGNECddJ7M8fKy2zjJlOxXSKx42GdQdf0A,608
babel/locale-data/si.dat,sha256=SUEexMOM-IB__OVxxxhsDx4RAb5P3mw88ZoS-gemPTI,237479
babel/locale-data/si_LK.dat,sha256=r7FqkzM6vXhR4XJ5rou9GYtU_PEyYXE3tZZqsLyR-mc,626
babel/locale-data/sk.dat,sha256=HOqji-Ab_yHtmlHJtbnrTy8wAbZW0pemfjhxeZeU72o,246088
babel/locale-data/sk_SK.dat,sha256=EOO68wv6kuaSIKyg_BNUAfCCffzGwQlG0ABrSFcDJF4,644
babel/locale-data/sl.dat,sha256=AZ3B8_mrbyi03b28ovtymTa7G08kAijUL-XKIqdDLqM,237036
babel/locale-data/sl_SI.dat,sha256=0JTmTrnyrjJHBBK5TeJw_c-WW0ZmLYWZxiXPS2serts,626
babel/locale-data/smn.dat,sha256=V4oN72KuQA5RQjG6DCR2RFo4USYyOiFFLZu8hA0QZVk,42730
babel/locale-data/smn_FI.dat,sha256=87dist-cLsNDQvOxUdqRgAuktFuar6Tts6CRw41B1k4,645
babel/locale-data/sn.dat,sha256=DdCgeUS6dW5p-d4oye7sMvAKDG1v99WB8alp27YF_dk,23304
babel/locale-data/sn_ZW.dat,sha256=351RLV0bhiAOnieNqRyHN3oqG-v2GvJsYUSqXm1wHaU,626
babel/locale-data/so.dat,sha256=M7lVxEpfLSVGo07ZxLvnLWPGwZTmzT3N2mKWLvhS2iY,160943
babel/locale-data/so_DJ.dat,sha256=4nw-1K0M7naoWcZ63Gars4xqbghDMiSHbZpXIbPP6OQ,647
babel/locale-data/so_ET.dat,sha256=fsS5yO4V8ciQMRZwZ6NDWcQBQFCxe1SnVgH6BUurHSE,646
babel/locale-data/so_KE.dat,sha256=E6NS6cdOmMJTOUpVuwgBE24tiDpcC5IOw_JZhkMjbd8,1199
babel/locale-data/so_SO.dat,sha256=AFtTqshg30FEAb50Q2lAC39KETj0xcSxc2DdMegt2zQ,607
babel/locale-data/sq.dat,sha256=dU1WRBIEsS2Lg6f4mGs_Rc0-PjyrQO1syYMLcaVM3gk,173926
babel/locale-data/sq_AL.dat,sha256=5SVT0s2J5NyvKwdqzWW1S2PIj33PVVz3pWw248jn17c,626
babel/locale-data/sq_MK.dat,sha256=PsaPJulWBIFlvRKlnhK0he5lZm_AXVYmeyCq53YQcAI,1199
babel/locale-data/sq_XK.dat,sha256=d_afvPzsozxIT1_MAI-fK_-V8nHTN07NmyhuBwQAdVo,1178
babel/locale-data/sr.dat,sha256=Ki0eLxOs1OAdQX-0Vr0rSnWC67af2gDpGeXr7OuSEWM,277049
babel/locale-data/sr_Cyrl.dat,sha256=0n_px-LQ4ZndDQXL0o_mRHgVxLFeYLNjn5MGOovuOoY,1978
babel/locale-data/sr_Cyrl_BA.dat,sha256=kKuyKTEZCb1Gja2U90yXFFfUDBAKW_ErHKjdHuJ1ZN0,3930
babel/locale-data/sr_Cyrl_ME.dat,sha256=gKr7GFHjcxb7Sq9uINfs8VEP2y7MNh1pJ2h2VGqblM8,3724
babel/locale-data/sr_Cyrl_RS.dat,sha256=2TmzkiJSNDpNIij3yJ8nDhrKRNPUV1XX1_RXOKaGvAw,626
babel/locale-data/sr_Cyrl_XK.dat,sha256=43d0NPoc03YrzZBie2_7DLy5oScODx-cFOK5e9hOMxQ,2607
babel/locale-data/sr_Latn.dat,sha256=-jvRgfFrHOe4yU5pAbwigibdz7b_Wef6VzoQn3t86S4,230271
babel/locale-data/sr_Latn_BA.dat,sha256=egRvjBkcg2d2Zm_ehAqKrixzVpD9EtOws6JVnbVusc0,4195
babel/locale-data/sr_Latn_ME.dat,sha256=tnFSZ7uoBD6mSZL32MvharPQR1QL_oV_XaJ38bc4gbE,3579
babel/locale-data/sr_Latn_RS.dat,sha256=6mUxn3NaqRzjHSNgJA_xzJCefSUxqXSUEsoMJ-juYUI,626
babel/locale-data/sr_Latn_XK.dat,sha256=6BLGB40r1xCprUQ_rR6TVqubuxmhQAUKyEJa5XWAGs0,2693
babel/locale-data/sv.dat,sha256=TZH0cY_a8DJK5BAZLOPgACkWLLwFkf5swjFulbKwJTQ,213436
babel/locale-data/sv_AX.dat,sha256=qxZlBDrxLtr7jugZ2tsY6liGF9U84UyZIT7Ap8eL7iA,644
babel/locale-data/sv_FI.dat,sha256=aV9hIsn7qoQ6nGzCbnJpiiPRSYFOq0yogn8ujMenwqU,1391
babel/locale-data/sv_SE.dat,sha256=HUamjd52mLuY8GbVgBeijJpc--Z3dnARh2lcE4QvUYw,644
babel/locale-data/sw.dat,sha256=OBKVQn-9PlGNBo89Ms1lDlr2yRQ8gX_1lUMDn1bhzMw,175523
babel/locale-data/sw_CD.dat,sha256=dsCHY8aKyhmiRIIiStPfM3RK7oyvdUUYXY94Or28t28,2698
babel/locale-data/sw_KE.dat,sha256=o7UlPWH8XMknIL_QFrQHFYzS7KRh9f0SFyekUCJZ_kE,4051
babel/locale-data/sw_TZ.dat,sha256=igleCvk2ZHZM9NoYRtnSd1s-oxuQjL1wLRvD_ZLBovo,607
babel/locale-data/sw_UG.dat,sha256=SmNCoGRulbvTy-uR4zcKFkELWxnfZRHTFQQCkMzL99g,628
babel/locale-data/ta.dat,sha256=hIoFhN-PxAoTw0W7Qiy0UXsD-qxIoiNeV-9jNuHr5Mk,256586
babel/locale-data/ta_IN.dat,sha256=U63SpEBo7jY11wGhZNMFeC3pyS0ExnmbUlmDjgOeuuo,649
babel/locale-data/ta_LK.dat,sha256=jTOsLhe3x-1CXvk6IDUuiEwrc0OvqkmsJmZjpQgDbpw,1199
babel/locale-data/ta_MY.dat,sha256=eO3c5Qh0zUMlQeFZgQSzQedaSz7mLTFlVDKR1Z-2VH0,1256
babel/locale-data/ta_SG.dat,sha256=1MxGlD45nwX3TAvpVESvP4CXrmxgqySQjeZjzH4Cofo,1275
babel/locale-data/te.dat,sha256=lPRz8YpMlzuXt_B5br8V3dYjMFTZ6metR2KM0xCF42Y,255657
babel/locale-data/te_IN.dat,sha256=nRtR37nSryged20zk3SQMpmOf-UuNFjL_2YFTMpyN7k,649
babel/locale-data/teo.dat,sha256=XiaLo3ISyEZi89ypqEUw0Yom5tknINNoJzne6gtXQio,16723
babel/locale-data/teo_KE.dat,sha256=adkRA-mbnesgQLGfXEqwmXSVQDi63TQyosUzSLt3Xu4,648
babel/locale-data/teo_UG.dat,sha256=jfrgUMX_AeETJJFHJAx4dXi9nXjywSwr6tZUQksea7c,608
babel/locale-data/tg.dat,sha256=mjaUWm9mGHcYYsk5vmlrFj9zTXf5WcJMRDGex2axIB4,29662
babel/locale-data/tg_TJ.dat,sha256=DvdJs-nLilO1-m1YgIOjEeY_f0Viv5FYjZ9VR8O6BAc,626
babel/locale-data/th.dat,sha256=hj2O37fbmx_Nb49pOFF2oniumaxVDlkOA5ULcI9hD8o,230260
babel/locale-data/th_TH.dat,sha256=msHIvTxB6gWoJP5-ClkGH_IedRgXpRQzZPj0oIyVkyU,626
babel/locale-data/ti.dat,sha256=l6soKgfd9j_zCwP2oOKDdY-P-8FUul0u8WvGiAW1VZ8,73057
babel/locale-data/ti_ER.dat,sha256=9n_DeoDAeCwpjxww70RaNjQFwSY0dAOwwdAxYhVhw_Q,976
babel/locale-data/ti_ET.dat,sha256=uo6aLFMfTxKNSCagtB8OrTa99SUAmiNUA1txEd0tHtE,626
babel/locale-data/tk.dat,sha256=TcKx03xQoOnnIbGUZGszP3MDMoy3Ncaq6ALq2j93j5w,165033
babel/locale-data/tk_TM.dat,sha256=te9O_n9WobfEaJnREwyPHVH4EMHcvdClBXZVPDWXKqA,626
babel/locale-data/to.dat,sha256=3Pl69HQ7EZkoRYPF_uD8XCinhZeYGlQLeYGRp8sLXco,155039
babel/locale-data/to_TO.dat,sha256=4uG_hh9oRlbSUKvlRBuU4uKaGSSs8WXIdQ6oPE9nsXw,607
babel/locale-data/tr.dat,sha256=bwVttS8wUkqEjHqWYckkC3mIBMvlR204MbbKx2RTxUk,199237
babel/locale-data/tr_CY.dat,sha256=O7i548Pkn7b38K67zCSjikwvr3fqUzgQFtw85HwftvE,1202
babel/locale-data/tr_TR.dat,sha256=60u8smbVf6BBW48PI-x3EpHvJ8tsbUSbJHhCqFwqEWY,626
babel/locale-data/tt.dat,sha256=_FR8BBZRdW9-Oio2iafGcu48Z9FtldL05h1qzhmM1OA,33295
babel/locale-data/tt_RU.dat,sha256=KMOS9m6E-8xqJQZQghSnhQdNjkNVu0dXsQkFwnfl7lM,644
babel/locale-data/twq.dat,sha256=OEwG2SXr9fEeQCMaHxjvKWlWLVtUhGamoG4qG6wjUmo,16224
babel/locale-data/twq_NE.dat,sha256=qI3KClszp7x1J6kAz3_warLhjR4nJfJo_KfvlGfrnLo,608
babel/locale-data/tzm.dat,sha256=ZYs6PgmbBXaPk6WRnQxKT5wzR0RzqXFPa12OWoO7dUc,16200
babel/locale-data/tzm_MA.dat,sha256=Iju1azWHoAUAq4eKXDIeqAfAEz4HrOo7iUtyrOvSpl4,608
babel/locale-data/ug.dat,sha256=TfYKG15NTf4fouUcNaqdusAKRxa1RISFoA8RYi4LN5s,128365
babel/locale-data/ug_CN.dat,sha256=U8dQkbFRAbqdAKISnFUw0FNUPmzrkJdLpK4n_14alKk,626
babel/locale-data/uk.dat,sha256=vdV4W3lJEEENgT6Z37OtNlhQPDq8sVw7nWzL3hCidSo,307335
babel/locale-data/uk_UA.dat,sha256=aG-RNa11ss0qW7vF50ajYL0qAxtKC-2ghG-P4W6vI8g,626
babel/locale-data/ur.dat,sha256=rw0vgCWeX-JSUbVwTUGvRddcKePa_yrnoFDb-_2SoW4,192380
babel/locale-data/ur_IN.dat,sha256=R-Q6ZyVoQOPtQr-7izba7emv8X589nguoMF477kk7-k,12616
babel/locale-data/ur_PK.dat,sha256=pjqQbfrTsrzcFl30FILINl2ELO3fBxiaRBhCr-KxWlE,626
babel/locale-data/uz.dat,sha256=9hnTiZ-2Ug_qye8uaQ_y6yORtJx6ufmXVo7yXEeRXv4,167959
babel/locale-data/uz_Arab.dat,sha256=5iCqhCA86006i6CS3bLMZ9IsxWlQoPmQKFbD5pRUJO4,4135
babel/locale-data/uz_Arab_AF.dat,sha256=Sx_uYNKZPq-kRomMbkZ9n8QRHjADxn9Jnga6fTVCn2s,669
babel/locale-data/uz_Cyrl.dat,sha256=GYkeuqND_cGOn_PWn6yDG-GJl-739xKvN51DjAFRRKs,98765
babel/locale-data/uz_Cyrl_UZ.dat,sha256=3wc5_aiBB_CAeYu5YRxPxZHUd0sZ4nIXafgKFDygfZs,626
babel/locale-data/uz_Latn.dat,sha256=U7k5KfSvCsFk2Atujg4-iVDmGikwr-vkjPTpb0D_vP8,1283
babel/locale-data/uz_Latn_UZ.dat,sha256=3aYJK9kmVFmLlDvDgL1ndruFKWOqVlHHJjag1NqV1a4,626
babel/locale-data/vai.dat,sha256=RqFFCMxtJ670vHHKuitqD_dDlWU2PBHYjgw-9yA8fTE,19046
babel/locale-data/vai_Latn.dat,sha256=X4-xL0o7OgV8SCyY9w5JPbtwU24MfloL1z1fS9FGL_A,15311
babel/locale-data/vai_Latn_LR.dat,sha256=wILwRiJahBELmxou0KnRkVMZReFA3tiFJzmb_OqZhNc,608
babel/locale-data/vai_Vaii.dat,sha256=HR8PB6naRA8D96Yn5Iy8ODPw8CLzpfrVoXyXd0apmLM,684
babel/locale-data/vai_Vaii_LR.dat,sha256=wILwRiJahBELmxou0KnRkVMZReFA3tiFJzmb_OqZhNc,608
babel/locale-data/vi.dat,sha256=q1bLXzF2MlouEux5Mf1jmX1aHrQ_ymF1F6o4hOSmCJY,163315
babel/locale-data/vi_VN.dat,sha256=ysK3bxwU2cv3FiZx5XYBq5kGCnsRAIYXefasVKTIKXE,626
babel/locale-data/vo.dat,sha256=oGgNNbnkX_qhkH1hjxpt0molwl0N68bFK4mP0Yh2k7o,5246
babel/locale-data/vo_001.dat,sha256=QsXTJYVDrzM24v2rKaWPgtSSN54Ege4iHGzzdxCElg0,841
babel/locale-data/vun.dat,sha256=n04FmbzLYMYEmZaOUMxpxLNF-v6OeB_RtLVQH6_vOc8,16119
babel/locale-data/vun_TZ.dat,sha256=0sIAdc6gRGhTbpGXbPkAwTzO7g-QBG0WdKsmjvFSxY0,608
babel/locale-data/wae.dat,sha256=qRzR0haJ9sG5gyquuNV8SZXHdSL-Ku8F0j9OGnYsIDE,28712
babel/locale-data/wae_CH.dat,sha256=7C-tFbrZbBSHRUeTh5pVAS-8twkecVsqzHHXfE5BJ-w,645
babel/locale-data/wo.dat,sha256=Wbq-Hjgk2y_GIAVbiGK7oT7Cdewbl20WN8w71cN75L4,25738
babel/locale-data/wo_SN.dat,sha256=SFe418ytRv4n5qYdd7PB5Z7wW2lMXKLaOZ6iTb1dTw4,607
babel/locale-data/xh.dat,sha256=aizGIMM4WRNrhmWoZSfTGjT2zPbTxyrZYokFCwFDYFI,15076
babel/locale-data/xh_ZA.dat,sha256=Si4fXBHGmX5L_vioWN-a_PowYRePLdkfsBgvfSIN_7c,626
babel/locale-data/xog.dat,sha256=MgoUNE6v6h13pdogvhGzpyc45PJlCUuLzk_xlQVSHFE,16608
babel/locale-data/xog_UG.dat,sha256=7Ft3wGj-9xqf-9AUUQlteuqRqcQKdmYx69SapgOjZCE,608
babel/locale-data/yav.dat,sha256=TgY99ASgQyjnMOCW4N2x3xonlWPNekJvOudZ53b325Q,15354
babel/locale-data/yav_CM.dat,sha256=7Zh5rEYu5q9jspiu_9pbQ3xewmLg_my-9Rt7rqOELnE,627
babel/locale-data/yi.dat,sha256=8oRirXWekzHxj00Uer9bzo08HWKIKAAS5MBFjfuipw4,30403
babel/locale-data/yi_001.dat,sha256=7Xf8CLNU0_JtRoWjwRdeOMQUxuCOIfMn6JgtuU3y2yk,903
babel/locale-data/yo.dat,sha256=ZufCWXkkr0h2lFvqXBzIrSTAfUOYV1aWOo_0-dgEr8Y,36073
babel/locale-data/yo_BJ.dat,sha256=v7sGbQbpNvx_925zeSHoAtlRlGj7aZuZ1Q-REvqaL9s,35277
babel/locale-data/yo_NG.dat,sha256=9XY2uokNDXCH8mbxl2lIRWAr0h39yAteR52zoOq7P7g,607
babel/locale-data/yue.dat,sha256=kDR-_CW4CAqqV-76GqPuvezZ55XloDm1cq9ckhoqPrc,176622
babel/locale-data/yue_Hans.dat,sha256=_GkNxpR7Ako3sJcHGfBiWl6aOswYZCGImDsM2Aj-BZ0,176532
babel/locale-data/yue_Hans_CN.dat,sha256=FzQmGCVsYnueDspYNGYj_DXHmMt_HLYG5gGmRFUHyT4,627
babel/locale-data/yue_Hant.dat,sha256=Na0Odc_YNhhVKjF2-IECRD83MTyW5JxDa214bVwKeSE,1297
babel/locale-data/yue_Hant_HK.dat,sha256=Ec0YPHv508DXmz4WTZyRXL3o1Vzt4g9aNtLy2QEC0ko,627
babel/locale-data/zgh.dat,sha256=D4nbMm0USubx9XKDP5MCvIe0QjMKsTWvZC_5EWOAYkQ,30568
babel/locale-data/zgh_MA.dat,sha256=jC43Fo4jRYvZlJw8YHkxMp397MnNJcV98LTJNXom-kQ,608
babel/locale-data/zh.dat,sha256=m43JIyhOjoSliRYPlfNr1BtwGB0weDIyhj8fgbX5iCs,178476
babel/locale-data/zh_Hans.dat,sha256=GjCjyfNKG8X93jBUM1Kz2xyOzaRo9c7d-wNjUxMQtC4,1296
babel/locale-data/zh_Hans_CN.dat,sha256=p5jtbCp3q1z8RyhHQbdEYtxxHeJZjE6hPHCZeaKuY18,626
babel/locale-data/zh_Hans_HK.dat,sha256=HFh5Iu49qjRP-aGP4Drj2Nd-afmy_nXurl2mz5CrsgE,4321
babel/locale-data/zh_Hans_MO.dat,sha256=4u8prMqf76bZEGZWmnttt8rFnOG9GQ9TJ-qvhDJMBV4,3266
babel/locale-data/zh_Hans_SG.dat,sha256=_d4ULzZoQycECN9bjIaWM7QT6NeqppuqZ_2J77SxEa8,3462
babel/locale-data/zh_Hant.dat,sha256=J-_FNz7Imicihh7kKd6t-AiB7JFNO_Et9D4L5BZL5GM,182449
babel/locale-data/zh_Hant_HK.dat,sha256=gna8uBk3yehPbTncX4g-xu_PKtqIx0m6wab3OFkocCo,54211
babel/locale-data/zh_Hant_MO.dat,sha256=ZAm_SZd9QU5d9yIpBY7zkXjftOuUsxO5n76GBC7PEr0,648
babel/locale-data/zh_Hant_TW.dat,sha256=HbzN_NAwhpX9xJ2wB2lCg-DcBsiCuP7lp3RK6xNtM58,626
babel/locale-data/zu.dat,sha256=Tk8uVFm5e3BT0K4TcTCBmh-KVbrfpP4EiUxAb97G5-M,170194
babel/locale-data/zu_ZA.dat,sha256=_bcquTfNmZuNRLAfnjIPVwShwnBdealyaQtkfbJB5bI,626
babel/localedata.py,sha256=YRA37osx1uFriIA6xsV4PH73pzZIRKAGC3DMAoiAhbY,7317
babel/localtime/__init__.py,sha256=97IqJD5FNnhre8jiYaEisp7PeWaBMhsb8EH0Gd2q5bM,1721
babel/localtime/__pycache__/__init__.cpython-37.pyc,,
babel/localtime/__pycache__/_unix.cpython-37.pyc,,
babel/localtime/__pycache__/_win32.cpython-37.pyc,,
babel/localtime/_unix.py,sha256=P66o3ErKXzhFvj3e3Qk6MBS7AR0qsDqSQclIAMHKp18,4801
babel/localtime/_win32.py,sha256=dGzhQ8AlY5iItSd-i3Fi2O3YWuVJ83PFSWe7EG2BaBg,3086
babel/messages/__init__.py,sha256=FslIS7Co5VK7Ec4g44kFO7m7zWw2-fQuu4gvTzqeIrk,254
babel/messages/__pycache__/__init__.cpython-37.pyc,,
babel/messages/__pycache__/catalog.cpython-37.pyc,,
babel/messages/__pycache__/checkers.cpython-37.pyc,,
babel/messages/__pycache__/extract.cpython-37.pyc,,
babel/messages/__pycache__/frontend.cpython-37.pyc,,
babel/messages/__pycache__/jslexer.cpython-37.pyc,,
babel/messages/__pycache__/mofile.cpython-37.pyc,,
babel/messages/__pycache__/plurals.cpython-37.pyc,,
babel/messages/__pycache__/pofile.cpython-37.pyc,,
babel/messages/catalog.py,sha256=kSj7z8QcQblXLvwyEC5AsuXZ1PnHEyWpBM49X0jUNqQ,32296
babel/messages/checkers.py,sha256=KwSkPIg3JJyjvhxkMOAAZFUs5ZQNae_wnMKRAYP6sis,6085
babel/messages/extract.py,sha256=E3mIosZGF6wNcVyylNIDbm1osgJsgRcOMGJnYNVLmus,26428
babel/messages/frontend.py,sha256=xN1sO2Y3Qp4FGj94vQToev3HeAzmFK5knWKtRzVpnzo,38729
babel/messages/jslexer.py,sha256=81Cun16nkMdbML2NAxixUls1fr2FHaW_-Uqju0wrL7s,6334
babel/messages/mofile.py,sha256=Ry-YGbadmabyB2I0WC8lGSBwLgYPXWOChzRILtcjs4A,7204
babel/messages/plurals.py,sha256=fXgXJ9kTllO0OjqQ-mqrpXW2lXNY1FTa9UrGrlaGBuc,7206
babel/messages/pofile.py,sha256=tGdqHM3tDSyDc6CFw9RwHDxCMQTEgacooKREqUnJBuk,21795
babel/numbers.py,sha256=wa9bdJvj5PZvM1wX7rzd4hdp3i6mx8nlk6et6aQKTF8,38551
babel/plural.py,sha256=rI02SVvzNQNniSh6TjfWIdwONJ3cE8JRS-V43P-KlC4,21314
babel/support.py,sha256=MSiTpW8BHfYAZMF6zfkfcv_pReGlmxDbAdNu96DzFjM,22303
babel/units.py,sha256=0Sl-FFQTRK36UMlFDZyDuTgRm1C8gT8SK02P3ViAr5M,11105
babel/util.py,sha256=JRSyuEezoLBMDDpIGrsgoV9bRTyOOS9CZEVppRbUjWo,7582
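The ``babel/locale-data/*.dat`` entries above are Babel's compiled CLDR tables, one per locale, which ``babel/localedata.py`` loads lazily. As an illustration only (this snippet is not part of the vendored files), the public API resolves them roughly like this:
.. code-block:: python
from datetime import datetime
from babel import Locale
from babel.dates import format_datetime
from babel.numbers import format_decimal
# Parsing a locale identifier pulls in the matching .dat file from the
# listing above (pt_BR.dat, sr_Latn.dat, ...) on first use.
brazil = Locale.parse('pt_BR')
print(brazil.display_name)
print(format_datetime(datetime(2019, 12, 16), locale='pt_BR'))
print(format_decimal(1234567.89, locale='sr_Latn'))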

View file

@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View file

@ -0,0 +1,22 @@
[console_scripts]
pybabel = babel.messages.frontend:main
[distutils.commands]
compile_catalog = babel.messages.frontend:compile_catalog
extract_messages = babel.messages.frontend:extract_messages
init_catalog = babel.messages.frontend:init_catalog
update_catalog = babel.messages.frontend:update_catalog
[distutils.setup_keywords]
message_extractors = babel.messages.frontend:check_message_extractors
[babel.checkers]
num_plurals = babel.messages.checkers:num_plurals
python_format = babel.messages.checkers:python_format
[babel.extractors]
ignore = babel.messages.extract:extract_nothing
python = babel.messages.extract:extract_python
javascript = babel.messages.extract:extract_javascript
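The entry points above register the ``pybabel`` console script, the distutils commands, and the default extractors. As a sketch only (assuming the frontend module listed above also exposes its usual ``CommandLineInterface`` helper and that the standard ``pybabel extract`` flags apply), the same machinery can be driven from Python:
.. code-block:: python
from babel.messages.frontend import CommandLineInterface
# Roughly equivalent to running ``pybabel extract -o messages.pot .``;
# the 'python' and 'javascript' extractors registered above do the work.
CommandLineInterface().run(['pybabel', 'extract', '-o', 'messages.pot', '.'])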

View file

@ -0,0 +1 @@
babel

View file

@ -0,0 +1,41 @@
GitPython was originally written by Michael Trier.
GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich.
Contributors are:
-Michael Trier <mtrier _at_ gmail.com>
-Alan Briolat
-Florian Apolloner <florian _at_ apolloner.eu>
-David Aguilar <davvid _at_ gmail.com>
-Jelmer Vernooij <jelmer _at_ samba.org>
-Steve Frécinaux <code _at_ istique.net>
-Kai Lautaportti <kai _at_ lautaportti.fi>
-Paul Sowden <paul _at_ idontsmoke.co.uk>
-Sebastian Thiel <byronimo _at_ gmail.com>
-Jonathan Chu <jonathan.chu _at_ me.com>
-Vincent Driessen <me _at_ nvie.com>
-Phil Elson <pelson _dot_ pub _at_ gmail.com>
-Bernard `Guyzmo` Pratz <guyzmo+gitpython+pub@m0g.net>
-Timothy B. Hartman <tbhartman _at_ gmail.com>
-Konstantin Popov <konstantin.popov.89 _at_ yandex.ru>
-Peter Jones <pjones _at_ redhat.com>
-Anson Mansfield <anson.mansfield _at_ gmail.com>
-Ken Odegard <ken.odegard _at_ gmail.com>
-Alexis Horgix Chotard
-Piotr Babij <piotr.babij _at_ gmail.com>
-Mikuláš Poul <mikulaspoul _at_ gmail.com>
-Charles Bouchard-Légaré <cblegare.atl _at_ ntis.ca>
-Yaroslav Halchenko <debian _at_ onerussian.com>
-Tim Swast <swast _at_ google.com>
-William Luc Ritchie
-David Host <hostdm _at_ outlook.com>
-A. Jesse Jiryu Davis <jesse _at_ emptysquare.net>
-Steven Whitman <ninloot _at_ gmail.com>
-Stefan Stancu <stefan.stancu _at_ gmail.com>
-César Izurieta <cesar _at_ caih.org>
-Arthur Milchior <arthur _at_ milchior.fr>
-Anil Khatri <anil.soccer.khatri _at_ gmail.com>
-JJ Graham <thetwoj _at_ gmail.com>
-Ben Thayer <ben _at_ benthayer.com>
Portions are derived from other open source works and are clearly marked.

View file

@ -0,0 +1,30 @@
Copyright (C) 2008, 2009 Michael Trier and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the GitPython project nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -0,0 +1,29 @@
Metadata-Version: 2.1
Name: GitPython
Version: 3.0.5
Summary: Python Git Library
Home-page: https://github.com/gitpython-developers/GitPython
Author: Sebastian Thiel, Michael Trier
Author-email: byronimo@gmail.com, mtrier@gmail.com
License: UNKNOWN
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Operating System :: POSIX
Classifier: Operating System :: Microsoft :: Windows
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Requires-Python: >=3.0, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Requires-Dist: gitdb2 (>=2.0.0)
GitPython is a Python library used to interact with Git repositories.
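For orientation only (this snippet is not part of the vendored metadata), typical GitPython usage against an existing checkout looks like:
.. code-block:: python
from git import Repo
# Open the repository that contains the current directory and read a
# little history through the wrappers listed in RECORD below.
repo = Repo('.', search_parent_directories=True)
print(repo.head.commit.hexsha)
for commit in repo.iter_commits(max_count=3):
    print(commit.hexsha[:8], commit.summary)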

View file

@ -0,0 +1,207 @@
GitPython-3.0.5.dist-info/AUTHORS,sha256=QleDRpZmrngZXrZxZVPInx1CGC6WBs2wgquWA4u1R48,1645
GitPython-3.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
GitPython-3.0.5.dist-info/LICENSE,sha256=_WV__CzvY9JceMq3gI1BTdA6KC5jiTSR_RHDL5i-Z_s,1521
GitPython-3.0.5.dist-info/METADATA,sha256=Dh4IlJPr-rEGh3U-aLCdO8p-myboJXkcU6MX0SJWAsI,1094
GitPython-3.0.5.dist-info/RECORD,,
GitPython-3.0.5.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
GitPython-3.0.5.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4
git/__init__.py,sha256=8bGCzvbncwo7QrZPEkY8mozm-JTkrGaNlSMCS-pFWHk,2334
git/__pycache__/__init__.cpython-37.pyc,,
git/__pycache__/cmd.cpython-37.pyc,,
git/__pycache__/compat.cpython-37.pyc,,
git/__pycache__/config.cpython-37.pyc,,
git/__pycache__/db.cpython-37.pyc,,
git/__pycache__/diff.cpython-37.pyc,,
git/__pycache__/exc.cpython-37.pyc,,
git/__pycache__/remote.cpython-37.pyc,,
git/__pycache__/util.cpython-37.pyc,,
git/cmd.py,sha256=gZn9tM9Oy6Xao8O_Q6_xOlhYYhzU5dagxQ4DA5Cwyk0,42869
git/compat.py,sha256=g00yP8eq-ap2IOXXo6mppHd-4Iw8q-d6tDvQegj5UYw,9436
git/config.py,sha256=HJnL92g8GmpwNgH2LOkV3Z1eRK3KGgOh-gbGmHP4fiQ,28415
git/db.py,sha256=bhsP6H_7pAnm5hUBKod8ZguYrmYMB2L4RgkNswiMgEo,1963
git/diff.py,sha256=QaI2R3uyB0k9YOuBTOmzdIF7Om9iWgEzHtmJVRQJ7Xc,19965
git/exc.py,sha256=j09XYwbEitorNgZNeNvsCnroQ-IUUM2f35rO2Hy8Myw,4907
git/index/__init__.py,sha256=Wj5zgJggZkEXueEDXdijxXahzxhextC08k70n0lHRN0,129
git/index/__pycache__/__init__.cpython-37.pyc,,
git/index/__pycache__/base.cpython-37.pyc,,
git/index/__pycache__/fun.cpython-37.pyc,,
git/index/__pycache__/typ.cpython-37.pyc,,
git/index/__pycache__/util.cpython-37.pyc,,
git/index/base.py,sha256=jkb965u7XrZ4rJFwUqbpfX2vx5AURcdt48KLag1NzKU,52209
git/index/fun.py,sha256=EmS1szuhLthtoVw9fJZf2QKyiXAbcgZjM9ghfxfUgoI,14280
git/index/typ.py,sha256=GLBZbDS3yScHJs0U18CX-heLTDjjGu6fN7T2L_NQr4A,4976
git/index/util.py,sha256=l6oh9_1KU1v5GQdpxqCOqs6WLt5xN1uWvkVHQqcCToA,2902
git/objects/__init__.py,sha256=6C02LlMygiFwTYtncz3GxEQfzHZr2WvUId0fnJ8HfLo,683
git/objects/__pycache__/__init__.cpython-37.pyc,,
git/objects/__pycache__/base.cpython-37.pyc,,
git/objects/__pycache__/blob.cpython-37.pyc,,
git/objects/__pycache__/commit.cpython-37.pyc,,
git/objects/__pycache__/fun.cpython-37.pyc,,
git/objects/__pycache__/tag.cpython-37.pyc,,
git/objects/__pycache__/tree.cpython-37.pyc,,
git/objects/__pycache__/util.cpython-37.pyc,,
git/objects/base.py,sha256=UZiyzyzx4_OJ3bWnwqb3mqh0LXT7oo0biYaTm-sLuAw,6689
git/objects/blob.py,sha256=evI3ptPmlln6gLpoQRvbIKjK4v59nT8ipd1vk1dGYtc,927
git/objects/commit.py,sha256=-4Wn1dvM1EM6Su_NNZH-FKdyBtXM18jK4TUORApnnos,20761
git/objects/fun.py,sha256=UKFhCINLA4X7YjnGH1hxF0Uj2sHsKbxFDd3rrQmRq6U,7352
git/objects/submodule/__init__.py,sha256=OsMeiex7cG6ev2f35IaJ5csH-eXchSoNKCt4HXUG5Ws,93
git/objects/submodule/__pycache__/__init__.cpython-37.pyc,,
git/objects/submodule/__pycache__/base.cpython-37.pyc,,
git/objects/submodule/__pycache__/root.cpython-37.pyc,,
git/objects/submodule/__pycache__/util.cpython-37.pyc,,
git/objects/submodule/base.py,sha256=s8H54844tc09U76G3-2lM2X3A518AGyVTD1rvEh81tM,53875
git/objects/submodule/root.py,sha256=N2i0PjRcw5bNLLIDAkviQjXhf9RvGSfVnbav4FNzkXo,17656
git/objects/submodule/util.py,sha256=VdgIG-cBo47b_7JcolAvjWaIMU0X5oImLjJ4wluc_iw,2745
git/objects/tag.py,sha256=OFeN6ZkLU5zVTz_1xuPNpz4YoEgpSpVZ1MMJnnUyGqE,3127
git/objects/tree.py,sha256=Ta1qAkuwzn7lk54_d7knqF2WL6DOc2MQG1k8mKLel1s,11069
git/objects/util.py,sha256=fmukvCi3HUjD9fE8AtNgn0qhsImKCJGZ5sdEwGiM358,12451
git/refs/__init__.py,sha256=3CRfAyE-Z78rJ3kSdKR1PNiXHEjHLw2VkU2JyDviNDU,242
git/refs/__pycache__/__init__.cpython-37.pyc,,
git/refs/__pycache__/head.cpython-37.pyc,,
git/refs/__pycache__/log.cpython-37.pyc,,
git/refs/__pycache__/reference.cpython-37.pyc,,
git/refs/__pycache__/remote.cpython-37.pyc,,
git/refs/__pycache__/symbolic.cpython-37.pyc,,
git/refs/__pycache__/tag.cpython-37.pyc,,
git/refs/head.py,sha256=KY_-Hgm3JDJParX380zxQv5-slxtTNnUE8xs--8nt9U,8706
git/refs/log.py,sha256=NI8RndjtjKzOoqo2hx_ThSQ1lt0trHMgJYW_1ML62_E,10918
git/refs/reference.py,sha256=OcQMwHJuelR1yKe1EF0IBfxeQZYv2kf0xunNSVwZV-M,4408
git/refs/remote.py,sha256=6JOyIurnomM3tNXdKRXfMK_V75gJNgr9_2sdevKU_tI,1670
git/refs/symbolic.py,sha256=TtPRNbt1dnki-_TAjAn3gP_h9Ixgba7z0rWcy7_WbQ8,26840
git/refs/tag.py,sha256=qoHwJ9suHx8u8NNg-6GvNftK36RnCNkpElRjh2r9wcI,2964
git/remote.py,sha256=aJsDcJwqGd3iqGBmDPrWzAlHMRYG1vUAnhjYS3J-k8k,35739
git/repo/__init__.py,sha256=ssUH4IVCoua5shI5h1l46P0X1kp82ydxVcH3PIVCnzg,108
git/repo/__pycache__/__init__.cpython-37.pyc,,
git/repo/__pycache__/base.cpython-37.pyc,,
git/repo/__pycache__/fun.cpython-37.pyc,,
git/repo/base.py,sha256=9h61NMN4IK4-kUBEmavZHlzO5fkJrux19YORfyeA8xs,44515
git/repo/fun.py,sha256=SuguBZs4sZE_SvAcfvn7yxXdoxKmgQdwUhgKAkeyISQ,11396
git/test/__init__.py,sha256=q-WCITGqFKTHnRFjUvJz5hUJBi8SP4InaAZRXZ8qj8k,220
git/test/__pycache__/__init__.cpython-37.pyc,,
git/test/__pycache__/test_actor.cpython-37.pyc,,
git/test/__pycache__/test_base.cpython-37.pyc,,
git/test/__pycache__/test_blob.cpython-37.pyc,,
git/test/__pycache__/test_commit.cpython-37.pyc,,
git/test/__pycache__/test_config.cpython-37.pyc,,
git/test/__pycache__/test_db.cpython-37.pyc,,
git/test/__pycache__/test_diff.cpython-37.pyc,,
git/test/__pycache__/test_docs.cpython-37.pyc,,
git/test/__pycache__/test_exc.cpython-37.pyc,,
git/test/__pycache__/test_fun.cpython-37.pyc,,
git/test/__pycache__/test_git.cpython-37.pyc,,
git/test/__pycache__/test_index.cpython-37.pyc,,
git/test/__pycache__/test_reflog.cpython-37.pyc,,
git/test/__pycache__/test_refs.cpython-37.pyc,,
git/test/__pycache__/test_remote.cpython-37.pyc,,
git/test/__pycache__/test_repo.cpython-37.pyc,,
git/test/__pycache__/test_stats.cpython-37.pyc,,
git/test/__pycache__/test_submodule.cpython-37.pyc,,
git/test/__pycache__/test_tree.cpython-37.pyc,,
git/test/__pycache__/test_util.cpython-37.pyc,,
git/test/fixtures/__pycache__/cat_file.cpython-37.pyc,,
git/test/fixtures/blame,sha256=4EDRSXdgbRtxHU_2lASFXC7eNShL2cVq3IU43tLWlD4,3663
git/test/fixtures/blame_binary,sha256=YLzoHqTAuv2Uv8IILh4ndQxJ_A1c09176E-3d5FMQsM,14807
git/test/fixtures/blame_complex_revision,sha256=tPguLsqmLxjuZWg5nRcdZCZeaBi-LOeVQEHfTX6X_B0,7645
git/test/fixtures/blame_incremental,sha256=3VXtrk8LVqfS5f2vsP5DTzFU3opeevUbENQUq22vTdw,982
git/test/fixtures/blame_incremental_2.11.1_plus,sha256=JDA_xCevOrOMDeKW-U8svYeA0E8Pa3sI7G8GALpxOHw,1154
git/test/fixtures/cat_file.py,sha256=7RDIymGyByw8I1OibenXM-DVsZ0_7gpazeYYG4C5GDM,136
git/test/fixtures/cat_file_blob,sha256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw,11
git/test/fixtures/cat_file_blob_nl,sha256=GJShnIW6FTrL90OsTkP8AEyJFgSyb4xp4eg-oq_HxI8,12
git/test/fixtures/cat_file_blob_size,sha256=JdTyqG3rXiV0uzIQtnuyT8xK-xn5OntloFfaqHSp0Y4,3
git/test/fixtures/commit_invalid_data,sha256=QlV-Pw5mw1Vhp6qivAQY5kcBP_BMJ_OIdLCinmes5Sw,242
git/test/fixtures/commit_with_gpgsig,sha256=3in_tJPkQv2K1wFx-PGqaCZQe40liMnl9cMYOJ8krTA,1387
git/test/fixtures/diff_2,sha256=sxE-xkV5lQrUEbpllp2X_AcFfPUmUr2wvSsc9qkZQLc,1994
git/test/fixtures/diff_2f,sha256=na11T8R1dhJUOKeO-fEeHymOxhXNrjvzzmA_r7x6oJM,732
git/test/fixtures/diff_abbrev-40_full-index_M_raw_no-color,sha256=AW-YEfutyH_RVyaP2nCTPhtjvkfqWi7NVL4s9Ab3Qww,109
git/test/fixtures/diff_change_in_type,sha256=Wo1iCaT1YBfGn5ZSJ40H7iVeqXKm-v-qJnsBUBKrpsI,319
git/test/fixtures/diff_change_in_type_raw,sha256=67KYtwIlQdTSwesABnIYTZxFgiwPhVyBXaDFoPXRFt4,108
git/test/fixtures/diff_copied_mode,sha256=rzKjhxG_HWuzMJAuGlVS6RKYV6g7Ko8lhc1CdwxGj-g,95
git/test/fixtures/diff_copied_mode_raw,sha256=dssv9FuXzR_-urJrkaZkBySxHosrGMyna4TxjVsOl-k,122
git/test/fixtures/diff_f,sha256=sNsG26bYvqU4pK_RwahaO-Lya8O9Xonwlyth8do_ptY,504
git/test/fixtures/diff_file_with_spaces,sha256=BOvQkq4AjQ_cR1e0iLYDQdNq2BLa-P5xhI4Xal7hYcE,216
git/test/fixtures/diff_i,sha256=792rEQvP9Q-MNxZ3_FsvhG5emE_q1nT9jpmQ_A1hFWE,5705
git/test/fixtures/diff_index_patch,sha256=qd9jD_eAQY5I9OLsbqdz3-lm_ncL2ALJhVLyj3enAfk,4598
git/test/fixtures/diff_index_raw,sha256=odNXPZQ4rlBnqYfJvvTKGS8QvfJE33WN_X-lIRMT8NI,101
git/test/fixtures/diff_initial,sha256=1RJTg7QSTdMGlqLDvjFUhKtV0bAV2NFW8rHBgzlVfyg,76
git/test/fixtures/diff_mode_only,sha256=pqDOHBLm09TWZ0orff-S7pCkQktD2sooW5mURG0vqLQ,46005
git/test/fixtures/diff_new_mode,sha256=b70EDNoC_gfq_P_fVFCIqT3WHU_P0l-1jhuR2cSEJFg,546
git/test/fixtures/diff_numstat,sha256=_Ls171vvsERXlRiJ1i1tA5vHyoYCzt3hKorFmic7UyE,22
git/test/fixtures/diff_p,sha256=3YlhR3UNFIPDv90Zn1vCXC46kQCVDuepUZIzwzD8xmk,19273
git/test/fixtures/diff_patch_binary,sha256=CLWigD0x0z3n_fpdh8LlkEyRUy7oDiWM-CJpGrqWPiM,155
git/test/fixtures/diff_patch_unsafe_paths,sha256=jsc2GM8j56puEDnMEhlBHG4jIhziN0uY8cuzGTTtHmw,3145
git/test/fixtures/diff_raw_binary,sha256=-PUPqf5wop8KkmubHnPK6RAVinlJuQf9Lqo4VBff23I,103
git/test/fixtures/diff_rename,sha256=-f4kqw0Zt1lRZZOmt5I0w9Jenbr3PngyTH2QeUQfv8g,415
git/test/fixtures/diff_rename_raw,sha256=VVBUjGEoXWWMYQFq-dyE708DijCnG974Qn79plVT39Q,112
git/test/fixtures/diff_tree_numstat_root,sha256=NbBofQm3wGm-1hyz8XKIoxMtC_bzz4x8TlxxuF8LLDU,63
git/test/fixtures/for_each_ref_with_path_component,sha256=hHVSiVHNEW5PKSPP4zFxxpYs4EYlPSJ9y-yykzkpWjk,84
git/test/fixtures/git_config,sha256=_Igi3In2TsksvwUdn7YcusMv-069ftMdlV1G7ZCs8nU,1517
git/test/fixtures/git_config-inc.cfg,sha256=jYjjNgfYBBkEAXYj5wLy7en-ISXbvVyOOfOmKsURYdc,92
git/test/fixtures/git_config_global,sha256=_tFDHYTW1Hxue2WXqjafVm_b9eM-OjTV6WTD2yZ3aqM,366
git/test/fixtures/git_config_multiple,sha256=xhyn_df95CrbWfA_YWV_Y1eR9bpbc-xZxWAnzCJTUU4,121
git/test/fixtures/git_config_with_comments,sha256=Q9IHrB4KE3l15iXoYD9-4TIMyd_rFczQ1CPAu-CI8bU,3997
git/test/fixtures/git_config_with_empty_value,sha256=686iisjxnex4YeT4qWdjsQh22X8UDw5yzKSerefFSTM,35
git/test/fixtures/git_file,sha256=44Qr9_8TluxWGPiPjDT4dEyF8x3fvnA9W7moDNiFAKo,16
git/test/fixtures/index,sha256=OBeM4XodizcBFgK_7S92fdjNTaitNxGzSBkcHXXWQvs,163616
git/test/fixtures/index_merge,sha256=IdtRRV85gi9dGFC4LNuGrZU2yttGAAANeS0_qvNO85w,9192
git/test/fixtures/issue-301_stderr,sha256=z6QL_UgCKQ1MMviNQNdhM22hOgp00zfJyc5LCm7Jl64,302879
git/test/fixtures/ls_tree_a,sha256=uBvIY8-7HnaBvSsVYigYJdsbeslxrtfeXh-tWXKtOnc,429
git/test/fixtures/ls_tree_b,sha256=pW3aIRcXMA1ZSE36049fJWeiVQl95qk_31U8Eh3Tc1c,119
git/test/fixtures/ls_tree_commit,sha256=cOgzX5Qcqvy4LU4dIBkcc63ccrOPBLab5DsCQPVpz_E,173
git/test/fixtures/ls_tree_empty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
git/test/fixtures/reflog_HEAD,sha256=8J5zwsJRmdb8xdWyQoRUzJYJnDSaeo0rLa5uthBovyQ,114210
git/test/fixtures/reflog_invalid_date,sha256=VlAYk2kGs3CySphJV0OmWwpWZK_uB9FxICTICZuKwSM,409
git/test/fixtures/reflog_invalid_email,sha256=1OoNdoGKNcNKWVQAv5ZKSgVEt0zSkigvHOTs3MMhVW0,411
git/test/fixtures/reflog_invalid_newsha,sha256=i-xph-C12dZT-dEKWS4VTDtX4AzQVUcCF3KXfMp9Gu0,404
git/test/fixtures/reflog_invalid_oldsha,sha256=guzXH-wQOfz3yQJFMChzhuXcgQ6G6rGTSwlIdBVX8Wg,398
git/test/fixtures/reflog_invalid_sep,sha256=0D9WHWpIGE2tQXD8utDcq-bbxdgVnWWCAMK_vwI3-zA,415
git/test/fixtures/reflog_master,sha256=K1-VX1oQ3gM_23qTjVV-8yQOXeXuRtePgUXAE6D1TVo,31286
git/test/fixtures/rev_list,sha256=pJPFZuJGwLzQ6m4P2d7VNaRLdMefGxxtztgU9fQfCCU,123
git/test/fixtures/rev_list_bisect_all,sha256=r0gnyZwq-IVHxNss4qE6zMv29PEcLyE0t_fV4MKISHc,2172
git/test/fixtures/rev_list_commit_diffs,sha256=n8qhU8FHEqr7Z8z8PvRGEODveuPbFIuaXB8UYGTqTPc,306
git/test/fixtures/rev_list_commit_idabbrev,sha256=W_cHcxor5sFGeS8-nmIpWNim-wtFY7636Hwh04Sfve8,271
git/test/fixtures/rev_list_commit_stats,sha256=1bZgYDN3iqjdIiZtYUuPNZXcyJYlDiusy3dw5utnr3M,244
git/test/fixtures/rev_list_count,sha256=wyBmlaA46bFntXaF6nx28phdDPwTZVW5kJr71pRrmb0,26855
git/test/fixtures/rev_list_delta_a,sha256=ikrcoYkO311vbCS_xoeyKE6myYKlKP5by88KU4oG6qI,328
git/test/fixtures/rev_list_delta_b,sha256=iiTGJRF2nzZrsHLXB1oOcZaoLvnSGAB3B9PLt5acmno,451
git/test/fixtures/rev_list_single,sha256=YqAJowQ_ujS8kUnNfBlm8ibKY7ki5vu2nXc_vt-4nq0,293
git/test/fixtures/rev_parse,sha256=y9iM5H6QPxDLEoGO9D4qSMBuDw4nz196c5VMflC1rak,8
git/test/fixtures/show_empty_commit,sha256=xeKoNCOFUPZcSztV3olKSs6u14fVdHwjnkGYLsEcZn8,252
git/test/fixtures/uncommon_branch_prefix_FETCH_HEAD,sha256=NO36DB4HWl4sOisR6EdFroTDakA-4XOx2kk4lFQIsiQ,603
git/test/fixtures/uncommon_branch_prefix_stderr,sha256=4-rJlXvPu-1ByjZzsUUJXFruPRxan7C5ssNtM7qZbeo,324
git/test/lib/__init__.py,sha256=k2xMRT9FC0m3yX_iMKaDcyuuZe0tGSr95ork3VOaeWk,414
git/test/lib/__pycache__/__init__.cpython-37.pyc,,
git/test/lib/__pycache__/asserts.cpython-37.pyc,,
git/test/lib/__pycache__/helper.cpython-37.pyc,,
git/test/lib/asserts.py,sha256=_9sOUHopeO-3PZOkxMXfTWaTxxPaWwmpnAVaDxpcaWk,2273
git/test/lib/helper.py,sha256=TI69pdx0xIMhfzOzBDB3BwqPvPsykp9bUXiyw2B0Xd8,13592
git/test/performance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
git/test/performance/__pycache__/__init__.cpython-37.pyc,,
git/test/performance/__pycache__/lib.cpython-37.pyc,,
git/test/performance/__pycache__/test_commit.cpython-37.pyc,,
git/test/performance/__pycache__/test_odb.cpython-37.pyc,,
git/test/performance/__pycache__/test_streams.cpython-37.pyc,,
git/test/performance/lib.py,sha256=qSicSiyRI30rP3EFeVoevC_sBDgXDFtZKIFr_Ikz84g,2427
git/test/performance/test_commit.py,sha256=ws8ORcvg3h0eXkI2G7a4OEl5QFG-9s2Agf0ut_8sUqU,3732
git/test/performance/test_odb.py,sha256=knbDhq2sRagwyGHKQ7uNZLWN8bzYt_VF6bNucoON6dI,2651
git/test/performance/test_streams.py,sha256=YriRvZ8i-yhMtQ5UdyGUt-X-fKhSddACwtT5e09bDyE,5816
git/test/test_actor.py,sha256=1bYmrTwWAYT_Qj9l9chbvuI8nNtHY6yGlDRJDDEq9A0,1242
git/test/test_base.py,sha256=k6I5nG7ZeBCYpXwi3HX_mvURFelgvQFys5pWVQR6kjw,5649
git/test/test_blob.py,sha256=Bs4FWke9Sjzx06EJuG9hh1T5qBgJEEz4aBCcr3cW9L0,878
git/test/test_commit.py,sha256=I9bHaid6bAR9vdEYyYLxP0Dfosn0vJ_ylCb_r-BYttI,15442
git/test/test_config.py,sha256=ZbUjlEwmIAKlpbpiJm8lizNUaopxLfslzsHiHtiaJMY,16611
git/test/test_db.py,sha256=e9UNddyQfoa-kzZo-XyrwVuYiq887NUkYrK8wZkTu9M,939
git/test/test_diff.py,sha256=49pEtXt6KTm3fHT3x0C1N7eh-lU4FY1pGDCZ8-k9bpw,15797
git/test/test_docs.py,sha256=XZZnXG7ya-liddMy39Ao6YtsRvYY3pXKMQXAW3gmulI,25340
git/test/test_exc.py,sha256=0DBYNiYVfPVlFKYRzqsoZUJnf0lQiUDmdrRIIHWeSlE,5123
git/test/test_fun.py,sha256=a91XgGk-YPwlgJEc-gy2tI_ilSq29XSQEywwc-kDnG0,10456
git/test/test_git.py,sha256=Jxd8gd0NKCnWPP0q9XbRdaMs6ZBb8xHaNRULYaAuWRk,11164
git/test/test_index.py,sha256=SoT5SRXnbsITU9zTkbCrUNUhnGqYer3T7HIs3O1C9pU,37348
git/test/test_reflog.py,sha256=vfI-NQCtnGlJEUtYR0_k7Y1Hc4pZQ5F_T4T49hxSnNU,3505
git/test/test_refs.py,sha256=2rNm9HdJZTWXx775JHG_R9Pd5X022IQ9C2CbP_9vDoE,23357
git/test/test_remote.py,sha256=pdrahbBiS513mS4oBLME2-pAsg0aMCYH5OoYaB9fD04,27019
git/test/test_repo.py,sha256=LkgJY_MC4F_2ZRgyDZ_zIQixuv3dcMuNkZP4Y9ChAlY,40101
git/test/test_stats.py,sha256=qmF2lL1wW0tEd17E-tkjmpPFVXzjREf7KW5JMCTQ4Zg,971
git/test/test_submodule.py,sha256=yyMisD-6UH0Im4sAKGgG1XTNMIBTbs5bRAz-3iZivOw,41981
git/test/test_tree.py,sha256=nR5OAQZLhv7kISoL3RO8ppkXAbKFYz3XlPAxABU1b4o,4046
git/test/test_util.py,sha256=BWEFonEy5ZasCvNRKWqfqfnEQ3wVHVtMqnWkqfmfqAI,9308
git/util.py,sha256=uJX0Q4FXyFYBhRQPWj6Cbe5KJCI7pvQzOn0wTSFtuKE,31606

View file

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.6)
Root-Is-Purelib: true
Tag: py3-none-any

View file

@ -0,0 +1 @@
pip

View file

@ -0,0 +1,174 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

View file

@ -0,0 +1,196 @@
Metadata-Version: 2.1
Name: PyNaCl
Version: 1.3.0
Summary: Python binding to the Networking and Cryptography (NaCl) library
Home-page: https://github.com/pyca/pynacl/
Author: The PyNaCl developers
Author-email: cryptography-dev@python.org
License: Apache License 2.0
Platform: UNKNOWN
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Provides-Extra: docs
Provides-Extra: tests
Requires-Dist: six
Requires-Dist: cffi (>=1.4.1)
Provides-Extra: docs
Requires-Dist: sphinx (>=1.6.5); extra == 'docs'
Requires-Dist: sphinx-rtd-theme; extra == 'docs'
Provides-Extra: tests
Requires-Dist: pytest (!=3.3.0,>=3.2.1); extra == 'tests'
Requires-Dist: hypothesis (>=3.27.0); extra == 'tests'
===============================================
PyNaCl: Python binding to the libsodium library
===============================================
.. image:: https://img.shields.io/pypi/v/pynacl.svg
:target: https://pypi.org/project/PyNaCl/
:alt: Latest Version
.. image:: https://travis-ci.org/pyca/pynacl.svg?branch=master
:target: https://travis-ci.org/pyca/pynacl
.. image:: https://codecov.io/github/pyca/pynacl/coverage.svg?branch=master
:target: https://codecov.io/github/pyca/pynacl?branch=master
PyNaCl is a Python binding to `libsodium`_, which is a fork of the
`Networking and Cryptography library`_. These libraries have a stated goal of
improving usability, security, and speed. PyNaCl supports Python 2.7 and 3.4+,
as well as PyPy 2.6+.
.. _libsodium: https://github.com/jedisct1/libsodium
.. _Networking and Cryptography library: https://nacl.cr.yp.to/
Features
--------
* Digital signatures
* Secret-key encryption
* Public-key encryption
* Hashing and message authentication
* Password based key derivation and password hashing
Installation
============
Binary wheel install
--------------------
PyNaCl ships as a binary wheel on OS X, Windows, and Linux ``manylinux1`` [#many]_,
so all dependencies are included. Make sure you have an up-to-date pip
and run:
.. code-block:: console
$ pip install pynacl
Linux source build
------------------
PyNaCl relies on `libsodium`_, a portable C library. A copy is bundled
with PyNaCl, so to install you can run:
.. code-block:: console
$ pip install pynacl
If you'd prefer to use the version of ``libsodium`` provided by your
distribution, you can disable the bundled copy during install by running:
.. code-block:: console
$ SODIUM_INSTALL=system pip install pynacl
.. warning:: Usage of the legacy ``easy_install`` command provided by setuptools
is generally discouraged, and is completely unsupported in PyNaCl's case.
.. _libsodium: https://github.com/jedisct1/libsodium
.. [#many] `manylinux1 wheels <https://www.python.org/dev/peps/pep-0513/>`_
are built on a baseline Linux environment based on CentOS 5.11
and should work on most x86 and x86_64 glibc-based Linux environments.
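Once installed, the secret-key encryption feature listed above comes down to a few lines; the following is a minimal sketch using the documented ``nacl.secret``/``nacl.utils`` API (it is not part of the upstream README):
.. code-block:: python
import nacl.secret
import nacl.utils
# A random 32-byte key; SecretBox provides authenticated symmetric encryption.
key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
box = nacl.secret.SecretBox(key)
ciphertext = box.encrypt(b"hello, libsodium")  # a random nonce is generated
assert box.decrypt(ciphertext) == b"hello, libsodium"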
Changelog
=========
1.3.0 - 2018-09-26
------------------
* Added support for Python 3.7.
* Update ``libsodium`` to 1.0.16.
* Run and test all code examples in PyNaCl docs through sphinx's
doctest builder.
* Add low-level bindings for chacha20-poly1305 AEAD constructions.
* Add low-level bindings for the chacha20-poly1305 secretstream constructions.
* Add low-level bindings for ed25519ph pre-hashed signing construction.
* Add low-level bindings for constant-time increment and addition
on fixed-precision big integers represented as little-endian
byte sequences.
* Add low-level bindings for the ISO/IEC 7816-4 compatible padding API.
* Add low-level bindings for libsodium's crypto_kx... key exchange
construction.
* Set hypothesis deadline to None in tests/test_pwhash.py to avoid
incorrect test failures on slower processor architectures. GitHub
issue #370
1.2.1 - 2017-12-04
------------------
* Update hypothesis minimum allowed version.
* Infrastructure: add proper configuration for readthedocs builder
runtime environment.
1.2.0 - 2017-11-01
------------------
* Update ``libsodium`` to 1.0.15.
* Infrastructure: add jenkins support for automatic build of
``manylinux1`` binary wheels
* Added support for ``SealedBox`` construction.
* Added support for ``argon2i`` and ``argon2id`` password hashing constructs
and restructured high-level password hashing implementation to expose
the same interface for all hashers.
* Added support for 128 bit ``siphashx24`` variant of ``siphash24``.
* Added support for ``from_seed`` APIs for X25519 keypair generation.
* Dropped support for Python 3.3.
1.1.2 - 2017-03-31
------------------
* Reorder the link-time library search path when using the bundled
``libsodium``.
1.1.1 - 2017-03-15
------------------
* Fixed a circular import bug in ``nacl.utils``.
1.1.0 - 2017-03-14
------------------
* Dropped support for Python 2.6.
* Added ``shared_key()`` method on ``Box``.
* You can now pass ``None`` to ``nonce`` when encrypting with ``Box`` or
``SecretBox`` and it will automatically generate a random nonce.
* Added support for ``siphash24``.
* Added support for ``blake2b``.
* Added support for ``scrypt``.
* Update ``libsodium`` to 1.0.11.
* Default to the bundled ``libsodium`` when compiling.
* All raised exceptions are defined by mixing in
``nacl.exceptions.CryptoError``.
1.0.1 - 2016-01-24
------------------
* Fix an issue with absolute paths that prevented the creation of wheels.
1.0 - 2016-01-23
----------------
* PyNaCl has been ported to use the new APIs available in cffi 1.0+.
Due to this change we no longer support PyPy releases older than 2.6.
* Python 3.2 support has been dropped.
* Functions to convert between Ed25519 and Curve25519 keys have been added.
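
In the 1.3.0 copy vendored here, the Ed25519/Curve25519 conversion is also
reachable through high-level helpers; a hedged sketch (method names per the
current ``nacl.signing`` API):

.. code-block:: python

    from nacl.signing import SigningKey

    signing_key = SigningKey.generate()                      # Ed25519 keypair
    curve_private = signing_key.to_curve25519_private_key()  # X25519 private key
    curve_public = signing_key.verify_key.to_curve25519_public_key()
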
0.3.0 - 2015-03-04
------------------
* The low-level API (`nacl.c.*`) has been changed to match the
upstream NaCl C/C++ conventions (as well as those of other NaCl bindings).
The order of arguments and return values has changed significantly. To
avoid silent failures, `nacl.c` has been removed, and replaced with
`nacl.bindings` (with the new argument ordering). If you have code which
calls these functions (e.g. `nacl.c.crypto_box_keypair()`), you must review
the new docstrings and update your code/imports to match the new
conventions.

View file

@ -0,0 +1,65 @@
PyNaCl-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyNaCl-1.3.0.dist-info/LICENSE.txt,sha256=0xdK1j5yHUydzLitQyCEiZLTFDabxGMZcgtYAskVP-k,9694
PyNaCl-1.3.0.dist-info/METADATA,sha256=pl3lEk7jyuFK7MNHloWA3m28tYU6xHI-S4le43id_x8,6632
PyNaCl-1.3.0.dist-info/RECORD,,
PyNaCl-1.3.0.dist-info/WHEEL,sha256=C4bGFJmj_qggBmsPGIGQ0FKvkClHeS8w8oo07-tVF_E,108
PyNaCl-1.3.0.dist-info/top_level.txt,sha256=wfdEOI_G2RIzmzsMyhpqP17HUh6Jcqi99to9aHLEslo,13
nacl/__init__.py,sha256=PS9BuXZoCwSvrDpB8HXldTHnA6lb4y00IRi3uqdW5_E,1170
nacl/__pycache__/__init__.cpython-37.pyc,,
nacl/__pycache__/encoding.cpython-37.pyc,,
nacl/__pycache__/exceptions.cpython-37.pyc,,
nacl/__pycache__/hash.cpython-37.pyc,,
nacl/__pycache__/hashlib.cpython-37.pyc,,
nacl/__pycache__/public.cpython-37.pyc,,
nacl/__pycache__/secret.cpython-37.pyc,,
nacl/__pycache__/signing.cpython-37.pyc,,
nacl/__pycache__/utils.cpython-37.pyc,,
nacl/_sodium.abi3.so,sha256=sv2gxhpN8C_iHXhPays7QUezt3axiim7W9zm6ddQzH8,2486075
nacl/bindings/__init__.py,sha256=dNH1zFjW87qszsld5oy6xMf2S1w2v_qshQwYHp66pz4,14943
nacl/bindings/__pycache__/__init__.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_aead.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_box.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_generichash.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_hash.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_kx.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_pwhash.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_scalarmult.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_secretbox.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_secretstream.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_shorthash.cpython-37.pyc,,
nacl/bindings/__pycache__/crypto_sign.cpython-37.pyc,,
nacl/bindings/__pycache__/randombytes.cpython-37.pyc,,
nacl/bindings/__pycache__/sodium_core.cpython-37.pyc,,
nacl/bindings/__pycache__/utils.cpython-37.pyc,,
nacl/bindings/crypto_aead.py,sha256=DE5zdi09GeHZxvmrhHtxVuTqF61y1cs8trTGh_6uP8Q,17335
nacl/bindings/crypto_box.py,sha256=hbHJetr9id5OvkbJwJoeqRQAhqSIGwWC2aXRAF5oPE4,9708
nacl/bindings/crypto_generichash.py,sha256=-e4b4DaopLBQHhEjLSjEoumy5fOs4QdTb-hou1S34C4,8010
nacl/bindings/crypto_hash.py,sha256=7Xp4mpXr4cpn-hAOU66KlYVUCVHP6deT0v_eW4UZZXo,2243
nacl/bindings/crypto_kx.py,sha256=2Gjxu5c7IKAwW2MOJa9zEn1EgpIVQ0tbZQs33REZb38,6937
nacl/bindings/crypto_pwhash.py,sha256=lWhEFKmXzFhKnzzxtWDwozs0CseZDkGgTJaI4YQ5rak,16898
nacl/bindings/crypto_scalarmult.py,sha256=VA2khmlUrnR24KK0CAdDw2dQ0jiYkku9-_NA-f1p21c,1803
nacl/bindings/crypto_secretbox.py,sha256=luvzB3lwBwXxKm63e9nA2neGtOXeeG8R9SyWEckIqdI,2864
nacl/bindings/crypto_secretstream.py,sha256=gdKinW10jP3CZ51hanE40s6e39rz8iuajdXTSBSKVcM,10474
nacl/bindings/crypto_shorthash.py,sha256=eVUE8byB1RjI0AoHib5BdZSSLtSqtdIcHgPCPWf2OZM,2189
nacl/bindings/crypto_sign.py,sha256=uA0RdHM4vsBDNhph2f7fcuI_9K8vvW-4hNHjajTIVU0,9641
nacl/bindings/randombytes.py,sha256=eThts6s-9xBXOl3GNzT57fV1dZUhzPjjAmAVIUHfcrc,988
nacl/bindings/sodium_core.py,sha256=52z0K7y6Ge6IlXcysWDVN7UdYcTOij6v0Cb0OLo8_Qc,1079
nacl/bindings/utils.py,sha256=jOKsDbsjxN9v_HI8DOib72chyU3byqbynXxbiV909-g,4420
nacl/encoding.py,sha256=tOiyIQVVpGU6A4Lzr0tMuqomhc_Aj0V_c1t56a-ZtPw,1928
nacl/exceptions.py,sha256=SG0BNtXnzmppI9in6xMTSizh1ryfgUIvIVMQv_A0bs8,1858
nacl/hash.py,sha256=4DKlmqpWOZJLhzTPk7_JSGXQ32lJULsS3AzJCGsibus,5928
nacl/hashlib.py,sha256=gMxOu-lIlKYr3ywSCjsJRBksYgpU2dvXgaAEfQz7PEg,3909
nacl/public.py,sha256=-nwQof5ov-wSSdvvoXh-FavTtjfpRnYykZkatNKyLd0,13442
nacl/pwhash/__init__.py,sha256=CN0mP6yteSYp3ui-DyWR1vjULNrXVN_gQ72CmTPoao0,2695
nacl/pwhash/__pycache__/__init__.cpython-37.pyc,,
nacl/pwhash/__pycache__/_argon2.cpython-37.pyc,,
nacl/pwhash/__pycache__/argon2i.cpython-37.pyc,,
nacl/pwhash/__pycache__/argon2id.cpython-37.pyc,,
nacl/pwhash/__pycache__/scrypt.cpython-37.pyc,,
nacl/pwhash/_argon2.py,sha256=Eu3-juLws3_v1gNy5aeSVPEwuRVFdGOrfeF0wPH9VHA,1878
nacl/pwhash/argon2i.py,sha256=EpheK0UHJvZYca_EMhhOcX5GXaOr0xCjFDTIgmSCSDo,4598
nacl/pwhash/argon2id.py,sha256=IqNm5RQNEd1Z9F-bEWT-_Y9noU26QoTR5YdWONg1uuI,4610
nacl/pwhash/scrypt.py,sha256=F9iUKbzZUMG2ZXuuk70p4KXI_nItue3VA39zmwOESE8,6025
nacl/secret.py,sha256=jf4WuUjnnXTekZ2elGgQozZl6zGzxGY_0Nw0fwehUlg,5430
nacl/signing.py,sha256=ZwA1l31ZgOIw_sAjiUPkzEo07uYYi8SE7Ni0G_R8ksQ,7302
nacl/utils.py,sha256=hhmIriBM7Bwyh3beTrqVqDDucai5gXlSliAMVrxIHPI,1691

View file

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.1)
Root-Is-Purelib: false
Tag: cp34-abi3-manylinux1_x86_64

View file

@ -0,0 +1,2 @@
_sodium
nacl

View file

@ -0,0 +1,181 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 1.3.11
Summary: Database Abstraction Library
Home-page: http://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Description: SQLAlchemy
==========
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically (a brief usage sketch follows this list).
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
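
The declarative ORM and metadata-driven schema generation described in the list
above can be sketched in a few lines. The model and the in-memory SQLite URL are
illustrative assumptions, using the SQLAlchemy 1.3 API:

.. code-block:: python

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String)

    engine = create_engine("sqlite://")  # throwaway in-memory database
    Base.metadata.create_all(engine)     # emit CREATE TABLE from the metadata

    Session = sessionmaker(bind=engine)
    session = Session()
    session.add(User(name="alice"))
    session.commit()                     # the unit of work flushes here

    print(session.query(User).filter_by(name="alice").one().id)
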
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-initiated decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
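
The last point can be seen directly in the Core expression language; in the brief
sketch below (the table definition is illustrative, SQLAlchemy 1.3 API), the
literal value never appears in the rendered SQL, only as a bound parameter:

.. code-block:: python

    from sqlalchemy import Column, Integer, MetaData, String, Table, select

    metadata = MetaData()
    users = Table(
        "users", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String),
    )

    stmt = select([users.c.id]).where(users.c.name == "alice")
    print(stmt)                   # SELECT users.id FROM users WHERE users.name = :name_1
    print(stmt.compile().params)  # {'name_1': 'alice'} -- the value stays a bound parameter
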
Documentation
-------------
Latest documentation is at:
http://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <http://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<http://www.opensource.org/licenses/mit-license.php>`_.
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Classifier: Operating System :: OS Independent
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Provides-Extra: mssql
Provides-Extra: postgresql
Provides-Extra: postgresql_psycopg2binary
Provides-Extra: postgresql_psycopg2cffi
Provides-Extra: mssql_pyodbc
Provides-Extra: pymysql
Provides-Extra: oracle
Provides-Extra: mssql_pymssql
Provides-Extra: mysql
Provides-Extra: postgresql_pg8000

View file

@ -0,0 +1,847 @@
AUTHORS
CHANGES
LICENSE
MANIFEST.in
README.dialects.rst
README.rst
README.unittests.rst
setup.cfg
setup.py
tox.ini
doc/contents.html
doc/copyright.html
doc/errors.html
doc/genindex.html
doc/glossary.html
doc/index.html
doc/intro.html
doc/notfound.html
doc/search.html
doc/searchindex.js
doc/_images/sqla_arch_small.png
doc/_images/sqla_engine_arch.png
doc/_modules/index.html
doc/_modules/examples/adjacency_list/adjacency_list.html
doc/_modules/examples/association/basic_association.html
doc/_modules/examples/association/dict_of_sets_with_default.html
doc/_modules/examples/association/proxied_association.html
doc/_modules/examples/custom_attributes/active_column_defaults.html
doc/_modules/examples/custom_attributes/custom_management.html
doc/_modules/examples/custom_attributes/listen_for_events.html
doc/_modules/examples/dogpile_caching/advanced.html
doc/_modules/examples/dogpile_caching/caching_query.html
doc/_modules/examples/dogpile_caching/environment.html
doc/_modules/examples/dogpile_caching/fixture_data.html
doc/_modules/examples/dogpile_caching/helloworld.html
doc/_modules/examples/dogpile_caching/local_session_caching.html
doc/_modules/examples/dogpile_caching/model.html
doc/_modules/examples/dogpile_caching/relationship_caching.html
doc/_modules/examples/dynamic_dict/dynamic_dict.html
doc/_modules/examples/elementtree/adjacency_list.html
doc/_modules/examples/elementtree/optimized_al.html
doc/_modules/examples/elementtree/pickle_type.html
doc/_modules/examples/generic_associations/discriminator_on_association.html
doc/_modules/examples/generic_associations/generic_fk.html
doc/_modules/examples/generic_associations/table_per_association.html
doc/_modules/examples/generic_associations/table_per_related.html
doc/_modules/examples/graphs/directed_graph.html
doc/_modules/examples/inheritance/concrete.html
doc/_modules/examples/inheritance/joined.html
doc/_modules/examples/inheritance/single.html
doc/_modules/examples/join_conditions/cast.html
doc/_modules/examples/join_conditions/threeway.html
doc/_modules/examples/large_collection/large_collection.html
doc/_modules/examples/materialized_paths/materialized_paths.html
doc/_modules/examples/nested_sets/nested_sets.html
doc/_modules/examples/performance/__main__.html
doc/_modules/examples/performance/bulk_inserts.html
doc/_modules/examples/performance/bulk_updates.html
doc/_modules/examples/performance/large_resultsets.html
doc/_modules/examples/performance/short_selects.html
doc/_modules/examples/performance/single_inserts.html
doc/_modules/examples/postgis/postgis.html
doc/_modules/examples/sharding/attribute_shard.html
doc/_modules/examples/space_invaders/space_invaders.html
doc/_modules/examples/versioned_history/history_meta.html
doc/_modules/examples/versioned_history/test_versioning.html
doc/_modules/examples/versioned_rows/versioned_map.html
doc/_modules/examples/versioned_rows/versioned_rows.html
doc/_modules/examples/versioned_rows/versioned_rows_w_versionid.html
doc/_modules/examples/versioned_rows/versioned_update_old_row.html
doc/_modules/examples/vertical/dictlike-polymorphic.html
doc/_modules/examples/vertical/dictlike.html
doc/_static/basic.css
doc/_static/changelog.css
doc/_static/detectmobile.js
doc/_static/docs.css
doc/_static/doctools.js
doc/_static/documentation_options.js
doc/_static/dragons.png
doc/_static/file.png
doc/_static/init.js
doc/_static/jquery-3.4.1.js
doc/_static/jquery.js
doc/_static/language_data.js
doc/_static/minus.png
doc/_static/plus.png
doc/_static/pygments.css
doc/_static/searchtools.js
doc/_static/sphinx_paramlinks.css
doc/_static/underscore-1.3.1.js
doc/_static/underscore.js
doc/build/Makefile
doc/build/conf.py
doc/build/contents.rst
doc/build/copyright.rst
doc/build/corrections.py
doc/build/errors.rst
doc/build/glossary.rst
doc/build/index.rst
doc/build/intro.rst
doc/build/requirements.txt
doc/build/sqla_arch_small.png
doc/build/changelog/changelog_01.rst
doc/build/changelog/changelog_02.rst
doc/build/changelog/changelog_03.rst
doc/build/changelog/changelog_04.rst
doc/build/changelog/changelog_05.rst
doc/build/changelog/changelog_06.rst
doc/build/changelog/changelog_07.rst
doc/build/changelog/changelog_08.rst
doc/build/changelog/changelog_09.rst
doc/build/changelog/changelog_10.rst
doc/build/changelog/changelog_11.rst
doc/build/changelog/changelog_12.rst
doc/build/changelog/changelog_13.rst
doc/build/changelog/index.rst
doc/build/changelog/migration_04.rst
doc/build/changelog/migration_05.rst
doc/build/changelog/migration_06.rst
doc/build/changelog/migration_07.rst
doc/build/changelog/migration_08.rst
doc/build/changelog/migration_09.rst
doc/build/changelog/migration_10.rst
doc/build/changelog/migration_11.rst
doc/build/changelog/migration_12.rst
doc/build/changelog/migration_13.rst
doc/build/changelog/unreleased_10/4065.rst
doc/build/changelog/unreleased_10/README.txt
doc/build/changelog/unreleased_11/README.txt
doc/build/changelog/unreleased_12/README.txt
doc/build/changelog/unreleased_13/README.txt
doc/build/core/api_basics.rst
doc/build/core/compiler.rst
doc/build/core/connections.rst
doc/build/core/constraints.rst
doc/build/core/custom_types.rst
doc/build/core/ddl.rst
doc/build/core/defaults.rst
doc/build/core/dml.rst
doc/build/core/engines.rst
doc/build/core/engines_connections.rst
doc/build/core/event.rst
doc/build/core/events.rst
doc/build/core/exceptions.rst
doc/build/core/expression_api.rst
doc/build/core/functions.rst
doc/build/core/index.rst
doc/build/core/inspection.rst
doc/build/core/interfaces.rst
doc/build/core/internals.rst
doc/build/core/metadata.rst
doc/build/core/pooling.rst
doc/build/core/reflection.rst
doc/build/core/schema.rst
doc/build/core/selectable.rst
doc/build/core/serializer.rst
doc/build/core/sqla_engine_arch.png
doc/build/core/sqlelement.rst
doc/build/core/tutorial.rst
doc/build/core/type_api.rst
doc/build/core/type_basics.rst
doc/build/core/types.rst
doc/build/core/visitors.rst
doc/build/dialects/firebird.rst
doc/build/dialects/index.rst
doc/build/dialects/mssql.rst
doc/build/dialects/mysql.rst
doc/build/dialects/oracle.rst
doc/build/dialects/postgresql.rst
doc/build/dialects/sqlite.rst
doc/build/dialects/sybase.rst
doc/build/faq/connections.rst
doc/build/faq/index.rst
doc/build/faq/metadata_schema.rst
doc/build/faq/ormconfiguration.rst
doc/build/faq/performance.rst
doc/build/faq/sessions.rst
doc/build/faq/sqlexpressions.rst
doc/build/orm/backref.rst
doc/build/orm/basic_relationships.rst
doc/build/orm/cascades.rst
doc/build/orm/classical.rst
doc/build/orm/collections.rst
doc/build/orm/composites.rst
doc/build/orm/constructors.rst
doc/build/orm/contextual.rst
doc/build/orm/deprecated.rst
doc/build/orm/events.rst
doc/build/orm/examples.rst
doc/build/orm/exceptions.rst
doc/build/orm/extending.rst
doc/build/orm/index.rst
doc/build/orm/inheritance.rst
doc/build/orm/inheritance_loading.rst
doc/build/orm/internals.rst
doc/build/orm/join_conditions.rst
doc/build/orm/loading.rst
doc/build/orm/loading_columns.rst
doc/build/orm/loading_objects.rst
doc/build/orm/loading_relationships.rst
doc/build/orm/mapped_attributes.rst
doc/build/orm/mapped_sql_expr.rst
doc/build/orm/mapper_config.rst
doc/build/orm/mapping_api.rst
doc/build/orm/mapping_columns.rst
doc/build/orm/mapping_styles.rst
doc/build/orm/nonstandard_mappings.rst
doc/build/orm/persistence_techniques.rst
doc/build/orm/query.rst
doc/build/orm/relationship_api.rst
doc/build/orm/relationship_persistence.rst
doc/build/orm/relationships.rst
doc/build/orm/scalar_mapping.rst
doc/build/orm/self_referential.rst
doc/build/orm/session.rst
doc/build/orm/session_api.rst
doc/build/orm/session_basics.rst
doc/build/orm/session_events.rst
doc/build/orm/session_state_management.rst
doc/build/orm/session_transaction.rst
doc/build/orm/tutorial.rst
doc/build/orm/versioning.rst
doc/build/orm/extensions/associationproxy.rst
doc/build/orm/extensions/automap.rst
doc/build/orm/extensions/baked.rst
doc/build/orm/extensions/horizontal_shard.rst
doc/build/orm/extensions/hybrid.rst
doc/build/orm/extensions/index.rst
doc/build/orm/extensions/indexable.rst
doc/build/orm/extensions/instrumentation.rst
doc/build/orm/extensions/mutable.rst
doc/build/orm/extensions/orderinglist.rst
doc/build/orm/extensions/declarative/api.rst
doc/build/orm/extensions/declarative/basic_use.rst
doc/build/orm/extensions/declarative/index.rst
doc/build/orm/extensions/declarative/inheritance.rst
doc/build/orm/extensions/declarative/mixins.rst
doc/build/orm/extensions/declarative/relationships.rst
doc/build/orm/extensions/declarative/table_config.rst
doc/build/texinputs/Makefile
doc/build/texinputs/sphinx.sty
doc/changelog/changelog_01.html
doc/changelog/changelog_02.html
doc/changelog/changelog_03.html
doc/changelog/changelog_04.html
doc/changelog/changelog_05.html
doc/changelog/changelog_06.html
doc/changelog/changelog_07.html
doc/changelog/changelog_08.html
doc/changelog/changelog_09.html
doc/changelog/changelog_10.html
doc/changelog/changelog_11.html
doc/changelog/changelog_12.html
doc/changelog/changelog_13.html
doc/changelog/index.html
doc/changelog/migration_04.html
doc/changelog/migration_05.html
doc/changelog/migration_06.html
doc/changelog/migration_07.html
doc/changelog/migration_08.html
doc/changelog/migration_09.html
doc/changelog/migration_10.html
doc/changelog/migration_11.html
doc/changelog/migration_12.html
doc/changelog/migration_13.html
doc/core/api_basics.html
doc/core/compiler.html
doc/core/connections.html
doc/core/constraints.html
doc/core/custom_types.html
doc/core/ddl.html
doc/core/defaults.html
doc/core/dml.html
doc/core/engines.html
doc/core/engines_connections.html
doc/core/event.html
doc/core/events.html
doc/core/exceptions.html
doc/core/expression_api.html
doc/core/functions.html
doc/core/index.html
doc/core/inspection.html
doc/core/interfaces.html
doc/core/internals.html
doc/core/metadata.html
doc/core/pooling.html
doc/core/reflection.html
doc/core/schema.html
doc/core/selectable.html
doc/core/serializer.html
doc/core/sqlelement.html
doc/core/tutorial.html
doc/core/type_api.html
doc/core/type_basics.html
doc/core/types.html
doc/core/visitors.html
doc/dialects/firebird.html
doc/dialects/index.html
doc/dialects/mssql.html
doc/dialects/mysql.html
doc/dialects/oracle.html
doc/dialects/postgresql.html
doc/dialects/sqlite.html
doc/dialects/sybase.html
doc/faq/connections.html
doc/faq/index.html
doc/faq/metadata_schema.html
doc/faq/ormconfiguration.html
doc/faq/performance.html
doc/faq/sessions.html
doc/faq/sqlexpressions.html
doc/orm/backref.html
doc/orm/basic_relationships.html
doc/orm/cascades.html
doc/orm/classical.html
doc/orm/collections.html
doc/orm/composites.html
doc/orm/constructors.html
doc/orm/contextual.html
doc/orm/deprecated.html
doc/orm/events.html
doc/orm/examples.html
doc/orm/exceptions.html
doc/orm/extending.html
doc/orm/index.html
doc/orm/inheritance.html
doc/orm/inheritance_loading.html
doc/orm/internals.html
doc/orm/join_conditions.html
doc/orm/loading.html
doc/orm/loading_columns.html
doc/orm/loading_objects.html
doc/orm/loading_relationships.html
doc/orm/mapped_attributes.html
doc/orm/mapped_sql_expr.html
doc/orm/mapper_config.html
doc/orm/mapping_api.html
doc/orm/mapping_columns.html
doc/orm/mapping_styles.html
doc/orm/nonstandard_mappings.html
doc/orm/persistence_techniques.html
doc/orm/query.html
doc/orm/relationship_api.html
doc/orm/relationship_persistence.html
doc/orm/relationships.html
doc/orm/scalar_mapping.html
doc/orm/self_referential.html
doc/orm/session.html
doc/orm/session_api.html
doc/orm/session_basics.html
doc/orm/session_events.html
doc/orm/session_state_management.html
doc/orm/session_transaction.html
doc/orm/tutorial.html
doc/orm/versioning.html
doc/orm/extensions/associationproxy.html
doc/orm/extensions/automap.html
doc/orm/extensions/baked.html
doc/orm/extensions/horizontal_shard.html
doc/orm/extensions/hybrid.html
doc/orm/extensions/index.html
doc/orm/extensions/indexable.html
doc/orm/extensions/instrumentation.html
doc/orm/extensions/mutable.html
doc/orm/extensions/orderinglist.html
doc/orm/extensions/declarative/api.html
doc/orm/extensions/declarative/basic_use.html
doc/orm/extensions/declarative/index.html
doc/orm/extensions/declarative/inheritance.html
doc/orm/extensions/declarative/mixins.html
doc/orm/extensions/declarative/relationships.html
doc/orm/extensions/declarative/table_config.html
examples/__init__.py
examples/adjacency_list/__init__.py
examples/adjacency_list/adjacency_list.py
examples/association/__init__.py
examples/association/basic_association.py
examples/association/dict_of_sets_with_default.py
examples/association/proxied_association.py
examples/custom_attributes/__init__.py
examples/custom_attributes/active_column_defaults.py
examples/custom_attributes/custom_management.py
examples/custom_attributes/listen_for_events.py
examples/dogpile_caching/__init__.py
examples/dogpile_caching/advanced.py
examples/dogpile_caching/caching_query.py
examples/dogpile_caching/environment.py
examples/dogpile_caching/fixture_data.py
examples/dogpile_caching/helloworld.py
examples/dogpile_caching/local_session_caching.py
examples/dogpile_caching/model.py
examples/dogpile_caching/relationship_caching.py
examples/dynamic_dict/__init__.py
examples/dynamic_dict/dynamic_dict.py
examples/elementtree/__init__.py
examples/elementtree/adjacency_list.py
examples/elementtree/optimized_al.py
examples/elementtree/pickle_type.py
examples/elementtree/test.xml
examples/elementtree/test2.xml
examples/elementtree/test3.xml
examples/generic_associations/__init__.py
examples/generic_associations/discriminator_on_association.py
examples/generic_associations/generic_fk.py
examples/generic_associations/table_per_association.py
examples/generic_associations/table_per_related.py
examples/graphs/__init__.py
examples/graphs/directed_graph.py
examples/inheritance/__init__.py
examples/inheritance/concrete.py
examples/inheritance/joined.py
examples/inheritance/single.py
examples/join_conditions/__init__.py
examples/join_conditions/cast.py
examples/join_conditions/threeway.py
examples/large_collection/__init__.py
examples/large_collection/large_collection.py
examples/materialized_paths/__init__.py
examples/materialized_paths/materialized_paths.py
examples/nested_sets/__init__.py
examples/nested_sets/nested_sets.py
examples/performance/__init__.py
examples/performance/__main__.py
examples/performance/bulk_inserts.py
examples/performance/bulk_updates.py
examples/performance/large_resultsets.py
examples/performance/short_selects.py
examples/performance/single_inserts.py
examples/postgis/__init__.py
examples/postgis/postgis.py
examples/sharding/__init__.py
examples/sharding/attribute_shard.py
examples/space_invaders/__init__.py
examples/space_invaders/space_invaders.py
examples/versioned_history/__init__.py
examples/versioned_history/history_meta.py
examples/versioned_history/test_versioning.py
examples/versioned_rows/__init__.py
examples/versioned_rows/versioned_map.py
examples/versioned_rows/versioned_rows.py
examples/versioned_rows/versioned_rows_w_versionid.py
examples/versioned_rows/versioned_update_old_row.py
examples/vertical/__init__.py
examples/vertical/dictlike-polymorphic.py
examples/vertical/dictlike.py
lib/SQLAlchemy.egg-info/PKG-INFO
lib/SQLAlchemy.egg-info/SOURCES.txt
lib/SQLAlchemy.egg-info/dependency_links.txt
lib/SQLAlchemy.egg-info/requires.txt
lib/SQLAlchemy.egg-info/top_level.txt
lib/sqlalchemy/__init__.py
lib/sqlalchemy/events.py
lib/sqlalchemy/exc.py
lib/sqlalchemy/inspection.py
lib/sqlalchemy/interfaces.py
lib/sqlalchemy/log.py
lib/sqlalchemy/processors.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/types.py
lib/sqlalchemy/cextension/processors.c
lib/sqlalchemy/cextension/resultproxy.c
lib/sqlalchemy/cextension/utils.c
lib/sqlalchemy/connectors/__init__.py
lib/sqlalchemy/connectors/mxodbc.py
lib/sqlalchemy/connectors/pyodbc.py
lib/sqlalchemy/connectors/zxJDBC.py
lib/sqlalchemy/databases/__init__.py
lib/sqlalchemy/dialects/__init__.py
lib/sqlalchemy/dialects/type_migration_guidelines.txt
lib/sqlalchemy/dialects/firebird/__init__.py
lib/sqlalchemy/dialects/firebird/base.py
lib/sqlalchemy/dialects/firebird/fdb.py
lib/sqlalchemy/dialects/firebird/kinterbasdb.py
lib/sqlalchemy/dialects/mssql/__init__.py
lib/sqlalchemy/dialects/mssql/adodbapi.py
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mssql/information_schema.py
lib/sqlalchemy/dialects/mssql/mxodbc.py
lib/sqlalchemy/dialects/mssql/pymssql.py
lib/sqlalchemy/dialects/mssql/pyodbc.py
lib/sqlalchemy/dialects/mssql/zxjdbc.py
lib/sqlalchemy/dialects/mysql/__init__.py
lib/sqlalchemy/dialects/mysql/base.py
lib/sqlalchemy/dialects/mysql/cymysql.py
lib/sqlalchemy/dialects/mysql/dml.py
lib/sqlalchemy/dialects/mysql/enumerated.py
lib/sqlalchemy/dialects/mysql/gaerdbms.py
lib/sqlalchemy/dialects/mysql/json.py
lib/sqlalchemy/dialects/mysql/mysqlconnector.py
lib/sqlalchemy/dialects/mysql/mysqldb.py
lib/sqlalchemy/dialects/mysql/oursql.py
lib/sqlalchemy/dialects/mysql/pymysql.py
lib/sqlalchemy/dialects/mysql/pyodbc.py
lib/sqlalchemy/dialects/mysql/reflection.py
lib/sqlalchemy/dialects/mysql/types.py
lib/sqlalchemy/dialects/mysql/zxjdbc.py
lib/sqlalchemy/dialects/oracle/__init__.py
lib/sqlalchemy/dialects/oracle/base.py
lib/sqlalchemy/dialects/oracle/cx_oracle.py
lib/sqlalchemy/dialects/oracle/zxjdbc.py
lib/sqlalchemy/dialects/postgresql/__init__.py
lib/sqlalchemy/dialects/postgresql/array.py
lib/sqlalchemy/dialects/postgresql/base.py
lib/sqlalchemy/dialects/postgresql/dml.py
lib/sqlalchemy/dialects/postgresql/ext.py
lib/sqlalchemy/dialects/postgresql/hstore.py
lib/sqlalchemy/dialects/postgresql/json.py
lib/sqlalchemy/dialects/postgresql/pg8000.py
lib/sqlalchemy/dialects/postgresql/psycopg2.py
lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
lib/sqlalchemy/dialects/postgresql/pygresql.py
lib/sqlalchemy/dialects/postgresql/pypostgresql.py
lib/sqlalchemy/dialects/postgresql/ranges.py
lib/sqlalchemy/dialects/postgresql/zxjdbc.py
lib/sqlalchemy/dialects/sqlite/__init__.py
lib/sqlalchemy/dialects/sqlite/base.py
lib/sqlalchemy/dialects/sqlite/json.py
lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
lib/sqlalchemy/dialects/sqlite/pysqlite.py
lib/sqlalchemy/dialects/sybase/__init__.py
lib/sqlalchemy/dialects/sybase/base.py
lib/sqlalchemy/dialects/sybase/mxodbc.py
lib/sqlalchemy/dialects/sybase/pyodbc.py
lib/sqlalchemy/dialects/sybase/pysybase.py
lib/sqlalchemy/engine/__init__.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py
lib/sqlalchemy/engine/interfaces.py
lib/sqlalchemy/engine/reflection.py
lib/sqlalchemy/engine/result.py
lib/sqlalchemy/engine/strategies.py
lib/sqlalchemy/engine/threadlocal.py
lib/sqlalchemy/engine/url.py
lib/sqlalchemy/engine/util.py
lib/sqlalchemy/event/__init__.py
lib/sqlalchemy/event/api.py
lib/sqlalchemy/event/attr.py
lib/sqlalchemy/event/base.py
lib/sqlalchemy/event/legacy.py
lib/sqlalchemy/event/registry.py
lib/sqlalchemy/ext/__init__.py
lib/sqlalchemy/ext/associationproxy.py
lib/sqlalchemy/ext/automap.py
lib/sqlalchemy/ext/baked.py
lib/sqlalchemy/ext/compiler.py
lib/sqlalchemy/ext/horizontal_shard.py
lib/sqlalchemy/ext/hybrid.py
lib/sqlalchemy/ext/indexable.py
lib/sqlalchemy/ext/instrumentation.py
lib/sqlalchemy/ext/mutable.py
lib/sqlalchemy/ext/orderinglist.py
lib/sqlalchemy/ext/serializer.py
lib/sqlalchemy/ext/declarative/__init__.py
lib/sqlalchemy/ext/declarative/api.py
lib/sqlalchemy/ext/declarative/base.py
lib/sqlalchemy/ext/declarative/clsregistry.py
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/attributes.py
lib/sqlalchemy/orm/base.py
lib/sqlalchemy/orm/collections.py
lib/sqlalchemy/orm/dependency.py
lib/sqlalchemy/orm/deprecated_interfaces.py
lib/sqlalchemy/orm/descriptor_props.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/events.py
lib/sqlalchemy/orm/exc.py
lib/sqlalchemy/orm/identity.py
lib/sqlalchemy/orm/instrumentation.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/loading.py
lib/sqlalchemy/orm/mapper.py
lib/sqlalchemy/orm/path_registry.py
lib/sqlalchemy/orm/persistence.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/relationships.py
lib/sqlalchemy/orm/scoping.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/state.py
lib/sqlalchemy/orm/strategies.py
lib/sqlalchemy/orm/strategy_options.py
lib/sqlalchemy/orm/sync.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/orm/util.py
lib/sqlalchemy/pool/__init__.py
lib/sqlalchemy/pool/base.py
lib/sqlalchemy/pool/dbapi_proxy.py
lib/sqlalchemy/pool/impl.py
lib/sqlalchemy/sql/__init__.py
lib/sqlalchemy/sql/annotation.py
lib/sqlalchemy/sql/base.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/crud.py
lib/sqlalchemy/sql/ddl.py
lib/sqlalchemy/sql/default_comparator.py
lib/sqlalchemy/sql/dml.py
lib/sqlalchemy/sql/elements.py
lib/sqlalchemy/sql/expression.py
lib/sqlalchemy/sql/functions.py
lib/sqlalchemy/sql/naming.py
lib/sqlalchemy/sql/operators.py
lib/sqlalchemy/sql/schema.py
lib/sqlalchemy/sql/selectable.py
lib/sqlalchemy/sql/sqltypes.py
lib/sqlalchemy/sql/type_api.py
lib/sqlalchemy/sql/util.py
lib/sqlalchemy/sql/visitors.py
lib/sqlalchemy/testing/__init__.py
lib/sqlalchemy/testing/assertions.py
lib/sqlalchemy/testing/assertsql.py
lib/sqlalchemy/testing/config.py
lib/sqlalchemy/testing/engines.py
lib/sqlalchemy/testing/entities.py
lib/sqlalchemy/testing/exclusions.py
lib/sqlalchemy/testing/fixtures.py
lib/sqlalchemy/testing/mock.py
lib/sqlalchemy/testing/pickleable.py
lib/sqlalchemy/testing/profiling.py
lib/sqlalchemy/testing/provision.py
lib/sqlalchemy/testing/replay_fixture.py
lib/sqlalchemy/testing/requirements.py
lib/sqlalchemy/testing/schema.py
lib/sqlalchemy/testing/util.py
lib/sqlalchemy/testing/warnings.py
lib/sqlalchemy/testing/plugin/__init__.py
lib/sqlalchemy/testing/plugin/bootstrap.py
lib/sqlalchemy/testing/plugin/plugin_base.py
lib/sqlalchemy/testing/plugin/pytestplugin.py
lib/sqlalchemy/testing/suite/__init__.py
lib/sqlalchemy/testing/suite/test_cte.py
lib/sqlalchemy/testing/suite/test_ddl.py
lib/sqlalchemy/testing/suite/test_dialect.py
lib/sqlalchemy/testing/suite/test_insert.py
lib/sqlalchemy/testing/suite/test_reflection.py
lib/sqlalchemy/testing/suite/test_results.py
lib/sqlalchemy/testing/suite/test_select.py
lib/sqlalchemy/testing/suite/test_sequence.py
lib/sqlalchemy/testing/suite/test_types.py
lib/sqlalchemy/testing/suite/test_update_delete.py
lib/sqlalchemy/util/__init__.py
lib/sqlalchemy/util/_collections.py
lib/sqlalchemy/util/compat.py
lib/sqlalchemy/util/deprecations.py
lib/sqlalchemy/util/langhelpers.py
lib/sqlalchemy/util/queue.py
lib/sqlalchemy/util/topological.py
test/__init__.py
test/binary_data_one.dat
test/binary_data_two.dat
test/conftest.py
test/requirements.py
test/aaa_profiling/__init__.py
test/aaa_profiling/test_compiler.py
test/aaa_profiling/test_memusage.py
test/aaa_profiling/test_misc.py
test/aaa_profiling/test_orm.py
test/aaa_profiling/test_pool.py
test/aaa_profiling/test_resultset.py
test/aaa_profiling/test_zoomark.py
test/aaa_profiling/test_zoomark_orm.py
test/base/__init__.py
test/base/test_dependency.py
test/base/test_events.py
test/base/test_except.py
test/base/test_inspect.py
test/base/test_tutorials.py
test/base/test_utils.py
test/dialect/__init__.py
test/dialect/test_all.py
test/dialect/test_firebird.py
test/dialect/test_mxodbc.py
test/dialect/test_pyodbc.py
test/dialect/test_sqlite.py
test/dialect/test_suite.py
test/dialect/test_sybase.py
test/dialect/mssql/__init__.py
test/dialect/mssql/test_compiler.py
test/dialect/mssql/test_engine.py
test/dialect/mssql/test_query.py
test/dialect/mssql/test_reflection.py
test/dialect/mssql/test_types.py
test/dialect/mysql/__init__.py
test/dialect/mysql/test_compiler.py
test/dialect/mysql/test_dialect.py
test/dialect/mysql/test_for_update.py
test/dialect/mysql/test_on_duplicate.py
test/dialect/mysql/test_query.py
test/dialect/mysql/test_reflection.py
test/dialect/mysql/test_types.py
test/dialect/oracle/__init__.py
test/dialect/oracle/test_compiler.py
test/dialect/oracle/test_dialect.py
test/dialect/oracle/test_reflection.py
test/dialect/oracle/test_types.py
test/dialect/postgresql/__init__.py
test/dialect/postgresql/test_compiler.py
test/dialect/postgresql/test_dialect.py
test/dialect/postgresql/test_on_conflict.py
test/dialect/postgresql/test_query.py
test/dialect/postgresql/test_reflection.py
test/dialect/postgresql/test_types.py
test/engine/__init__.py
test/engine/test_bind.py
test/engine/test_ddlevents.py
test/engine/test_deprecations.py
test/engine/test_execute.py
test/engine/test_logging.py
test/engine/test_parseconnect.py
test/engine/test_pool.py
test/engine/test_processors.py
test/engine/test_reconnect.py
test/engine/test_reflection.py
test/engine/test_transaction.py
test/ext/__init__.py
test/ext/test_associationproxy.py
test/ext/test_automap.py
test/ext/test_baked.py
test/ext/test_compiler.py
test/ext/test_deprecations.py
test/ext/test_extendedattr.py
test/ext/test_horizontal_shard.py
test/ext/test_hybrid.py
test/ext/test_indexable.py
test/ext/test_mutable.py
test/ext/test_orderinglist.py
test/ext/test_serializer.py
test/ext/declarative/__init__.py
test/ext/declarative/test_basic.py
test/ext/declarative/test_clsregistry.py
test/ext/declarative/test_concurrency.py
test/ext/declarative/test_inheritance.py
test/ext/declarative/test_mixin.py
test/ext/declarative/test_reflection.py
test/orm/__init__.py
test/orm/_fixtures.py
test/orm/test_ac_relationships.py
test/orm/test_association.py
test/orm/test_assorted_eager.py
test/orm/test_attributes.py
test/orm/test_backref_mutations.py
test/orm/test_bind.py
test/orm/test_bulk.py
test/orm/test_bundle.py
test/orm/test_cascade.py
test/orm/test_collection.py
test/orm/test_compile.py
test/orm/test_composites.py
test/orm/test_cycles.py
test/orm/test_default_strategies.py
test/orm/test_defaults.py
test/orm/test_deferred.py
test/orm/test_deprecations.py
test/orm/test_descriptor.py
test/orm/test_dynamic.py
test/orm/test_eager_relations.py
test/orm/test_evaluator.py
test/orm/test_events.py
test/orm/test_expire.py
test/orm/test_froms.py
test/orm/test_generative.py
test/orm/test_hasparent.py
test/orm/test_immediate_load.py
test/orm/test_inspect.py
test/orm/test_instrumentation.py
test/orm/test_joins.py
test/orm/test_lazy_relations.py
test/orm/test_load_on_fks.py
test/orm/test_loading.py
test/orm/test_lockmode.py
test/orm/test_manytomany.py
test/orm/test_mapper.py
test/orm/test_merge.py
test/orm/test_naturalpks.py
test/orm/test_of_type.py
test/orm/test_onetoone.py
test/orm/test_options.py
test/orm/test_pickled.py
test/orm/test_query.py
test/orm/test_rel_fn.py
test/orm/test_relationships.py
test/orm/test_scoping.py
test/orm/test_selectable.py
test/orm/test_selectin_relations.py
test/orm/test_session.py
test/orm/test_subquery_relations.py
test/orm/test_sync.py
test/orm/test_transaction.py
test/orm/test_unitofwork.py
test/orm/test_unitofworkv2.py
test/orm/test_update_delete.py
test/orm/test_utils.py
test/orm/test_validators.py
test/orm/test_versioning.py
test/orm/inheritance/__init__.py
test/orm/inheritance/_poly_fixtures.py
test/orm/inheritance/test_abc_inheritance.py
test/orm/inheritance/test_abc_polymorphic.py
test/orm/inheritance/test_assorted_poly.py
test/orm/inheritance/test_basic.py
test/orm/inheritance/test_concrete.py
test/orm/inheritance/test_magazine.py
test/orm/inheritance/test_manytomany.py
test/orm/inheritance/test_poly_linked_list.py
test/orm/inheritance/test_poly_loading.py
test/orm/inheritance/test_poly_persistence.py
test/orm/inheritance/test_polymorphic_rel.py
test/orm/inheritance/test_productspec.py
test/orm/inheritance/test_relationship.py
test/orm/inheritance/test_selects.py
test/orm/inheritance/test_single.py
test/orm/inheritance/test_with_poly.py
test/perf/invalidate_stresstest.py
test/perf/orm2010.py
test/sql/__init__.py
test/sql/test_case_statement.py
test/sql/test_compiler.py
test/sql/test_computed.py
test/sql/test_constraints.py
test/sql/test_cte.py
test/sql/test_ddlemit.py
test/sql/test_defaults.py
test/sql/test_delete.py
test/sql/test_deprecations.py
test/sql/test_functions.py
test/sql/test_generative.py
test/sql/test_insert.py
test/sql/test_insert_exec.py
test/sql/test_inspect.py
test/sql/test_join_rewriting.py
test/sql/test_labels.py
test/sql/test_lateral.py
test/sql/test_metadata.py
test/sql/test_operators.py
test/sql/test_query.py
test/sql/test_quote.py
test/sql/test_resultset.py
test/sql/test_returning.py
test/sql/test_rowcount.py
test/sql/test_selectable.py
test/sql/test_tablesample.py
test/sql/test_text.py
test/sql/test_type_expressions.py
test/sql/test_types.py
test/sql/test_unicode.py
test/sql/test_update.py
test/sql/test_utils.py

View file

@ -0,0 +1,394 @@
../sqlalchemy/__init__.py
../sqlalchemy/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/__pycache__/events.cpython-37.pyc
../sqlalchemy/__pycache__/exc.cpython-37.pyc
../sqlalchemy/__pycache__/inspection.cpython-37.pyc
../sqlalchemy/__pycache__/interfaces.cpython-37.pyc
../sqlalchemy/__pycache__/log.cpython-37.pyc
../sqlalchemy/__pycache__/processors.cpython-37.pyc
../sqlalchemy/__pycache__/schema.cpython-37.pyc
../sqlalchemy/__pycache__/types.cpython-37.pyc
../sqlalchemy/connectors/__init__.py
../sqlalchemy/connectors/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/connectors/__pycache__/mxodbc.cpython-37.pyc
../sqlalchemy/connectors/__pycache__/pyodbc.cpython-37.pyc
../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-37.pyc
../sqlalchemy/connectors/mxodbc.py
../sqlalchemy/connectors/pyodbc.py
../sqlalchemy/connectors/zxJDBC.py
../sqlalchemy/cprocessors.cpython-37m-x86_64-linux-gnu.so
../sqlalchemy/cresultproxy.cpython-37m-x86_64-linux-gnu.so
../sqlalchemy/cutils.cpython-37m-x86_64-linux-gnu.so
../sqlalchemy/databases/__init__.py
../sqlalchemy/databases/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/__init__.py
../sqlalchemy/dialects/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/firebird/__init__.py
../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/firebird/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-37.pyc
../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-37.pyc
../sqlalchemy/dialects/firebird/base.py
../sqlalchemy/dialects/firebird/fdb.py
../sqlalchemy/dialects/firebird/kinterbasdb.py
../sqlalchemy/dialects/mssql/__init__.py
../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-37.pyc
../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-37.pyc
../sqlalchemy/dialects/mssql/adodbapi.py
../sqlalchemy/dialects/mssql/base.py
../sqlalchemy/dialects/mssql/information_schema.py
../sqlalchemy/dialects/mssql/mxodbc.py
../sqlalchemy/dialects/mssql/pymssql.py
../sqlalchemy/dialects/mssql/pyodbc.py
../sqlalchemy/dialects/mssql/zxjdbc.py
../sqlalchemy/dialects/mysql/__init__.py
../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/dml.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/json.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/types.cpython-37.pyc
../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-37.pyc
../sqlalchemy/dialects/mysql/base.py
../sqlalchemy/dialects/mysql/cymysql.py
../sqlalchemy/dialects/mysql/dml.py
../sqlalchemy/dialects/mysql/enumerated.py
../sqlalchemy/dialects/mysql/gaerdbms.py
../sqlalchemy/dialects/mysql/json.py
../sqlalchemy/dialects/mysql/mysqlconnector.py
../sqlalchemy/dialects/mysql/mysqldb.py
../sqlalchemy/dialects/mysql/oursql.py
../sqlalchemy/dialects/mysql/pymysql.py
../sqlalchemy/dialects/mysql/pyodbc.py
../sqlalchemy/dialects/mysql/reflection.py
../sqlalchemy/dialects/mysql/types.py
../sqlalchemy/dialects/mysql/zxjdbc.py
../sqlalchemy/dialects/oracle/__init__.py
../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/oracle/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-37.pyc
../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-37.pyc
../sqlalchemy/dialects/oracle/base.py
../sqlalchemy/dialects/oracle/cx_oracle.py
../sqlalchemy/dialects/oracle/zxjdbc.py
../sqlalchemy/dialects/postgresql/__init__.py
../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/array.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-37.pyc
../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-37.pyc
../sqlalchemy/dialects/postgresql/array.py
../sqlalchemy/dialects/postgresql/base.py
../sqlalchemy/dialects/postgresql/dml.py
../sqlalchemy/dialects/postgresql/ext.py
../sqlalchemy/dialects/postgresql/hstore.py
../sqlalchemy/dialects/postgresql/json.py
../sqlalchemy/dialects/postgresql/pg8000.py
../sqlalchemy/dialects/postgresql/psycopg2.py
../sqlalchemy/dialects/postgresql/psycopg2cffi.py
../sqlalchemy/dialects/postgresql/pygresql.py
../sqlalchemy/dialects/postgresql/pypostgresql.py
../sqlalchemy/dialects/postgresql/ranges.py
../sqlalchemy/dialects/postgresql/zxjdbc.py
../sqlalchemy/dialects/sqlite/__init__.py
../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/sqlite/__pycache__/json.cpython-37.pyc
../sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-37.pyc
../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-37.pyc
../sqlalchemy/dialects/sqlite/base.py
../sqlalchemy/dialects/sqlite/json.py
../sqlalchemy/dialects/sqlite/pysqlcipher.py
../sqlalchemy/dialects/sqlite/pysqlite.py
../sqlalchemy/dialects/sybase/__init__.py
../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/dialects/sybase/__pycache__/base.cpython-37.pyc
../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-37.pyc
../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-37.pyc
../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-37.pyc
../sqlalchemy/dialects/sybase/base.py
../sqlalchemy/dialects/sybase/mxodbc.py
../sqlalchemy/dialects/sybase/pyodbc.py
../sqlalchemy/dialects/sybase/pysybase.py
../sqlalchemy/engine/__init__.py
../sqlalchemy/engine/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/engine/__pycache__/base.cpython-37.pyc
../sqlalchemy/engine/__pycache__/default.cpython-37.pyc
../sqlalchemy/engine/__pycache__/interfaces.cpython-37.pyc
../sqlalchemy/engine/__pycache__/reflection.cpython-37.pyc
../sqlalchemy/engine/__pycache__/result.cpython-37.pyc
../sqlalchemy/engine/__pycache__/strategies.cpython-37.pyc
../sqlalchemy/engine/__pycache__/threadlocal.cpython-37.pyc
../sqlalchemy/engine/__pycache__/url.cpython-37.pyc
../sqlalchemy/engine/__pycache__/util.cpython-37.pyc
../sqlalchemy/engine/base.py
../sqlalchemy/engine/default.py
../sqlalchemy/engine/interfaces.py
../sqlalchemy/engine/reflection.py
../sqlalchemy/engine/result.py
../sqlalchemy/engine/strategies.py
../sqlalchemy/engine/threadlocal.py
../sqlalchemy/engine/url.py
../sqlalchemy/engine/util.py
../sqlalchemy/event/__init__.py
../sqlalchemy/event/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/event/__pycache__/api.cpython-37.pyc
../sqlalchemy/event/__pycache__/attr.cpython-37.pyc
../sqlalchemy/event/__pycache__/base.cpython-37.pyc
../sqlalchemy/event/__pycache__/legacy.cpython-37.pyc
../sqlalchemy/event/__pycache__/registry.cpython-37.pyc
../sqlalchemy/event/api.py
../sqlalchemy/event/attr.py
../sqlalchemy/event/base.py
../sqlalchemy/event/legacy.py
../sqlalchemy/event/registry.py
../sqlalchemy/events.py
../sqlalchemy/exc.py
../sqlalchemy/ext/__init__.py
../sqlalchemy/ext/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/ext/__pycache__/associationproxy.cpython-37.pyc
../sqlalchemy/ext/__pycache__/automap.cpython-37.pyc
../sqlalchemy/ext/__pycache__/baked.cpython-37.pyc
../sqlalchemy/ext/__pycache__/compiler.cpython-37.pyc
../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-37.pyc
../sqlalchemy/ext/__pycache__/hybrid.cpython-37.pyc
../sqlalchemy/ext/__pycache__/indexable.cpython-37.pyc
../sqlalchemy/ext/__pycache__/instrumentation.cpython-37.pyc
../sqlalchemy/ext/__pycache__/mutable.cpython-37.pyc
../sqlalchemy/ext/__pycache__/orderinglist.cpython-37.pyc
../sqlalchemy/ext/__pycache__/serializer.cpython-37.pyc
../sqlalchemy/ext/associationproxy.py
../sqlalchemy/ext/automap.py
../sqlalchemy/ext/baked.py
../sqlalchemy/ext/compiler.py
../sqlalchemy/ext/declarative/__init__.py
../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/ext/declarative/__pycache__/api.cpython-37.pyc
../sqlalchemy/ext/declarative/__pycache__/base.cpython-37.pyc
../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-37.pyc
../sqlalchemy/ext/declarative/api.py
../sqlalchemy/ext/declarative/base.py
../sqlalchemy/ext/declarative/clsregistry.py
../sqlalchemy/ext/horizontal_shard.py
../sqlalchemy/ext/hybrid.py
../sqlalchemy/ext/indexable.py
../sqlalchemy/ext/instrumentation.py
../sqlalchemy/ext/mutable.py
../sqlalchemy/ext/orderinglist.py
../sqlalchemy/ext/serializer.py
../sqlalchemy/inspection.py
../sqlalchemy/interfaces.py
../sqlalchemy/log.py
../sqlalchemy/orm/__init__.py
../sqlalchemy/orm/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/orm/__pycache__/attributes.cpython-37.pyc
../sqlalchemy/orm/__pycache__/base.cpython-37.pyc
../sqlalchemy/orm/__pycache__/collections.cpython-37.pyc
../sqlalchemy/orm/__pycache__/dependency.cpython-37.pyc
../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-37.pyc
../sqlalchemy/orm/__pycache__/descriptor_props.cpython-37.pyc
../sqlalchemy/orm/__pycache__/dynamic.cpython-37.pyc
../sqlalchemy/orm/__pycache__/evaluator.cpython-37.pyc
../sqlalchemy/orm/__pycache__/events.cpython-37.pyc
../sqlalchemy/orm/__pycache__/exc.cpython-37.pyc
../sqlalchemy/orm/__pycache__/identity.cpython-37.pyc
../sqlalchemy/orm/__pycache__/instrumentation.cpython-37.pyc
../sqlalchemy/orm/__pycache__/interfaces.cpython-37.pyc
../sqlalchemy/orm/__pycache__/loading.cpython-37.pyc
../sqlalchemy/orm/__pycache__/mapper.cpython-37.pyc
../sqlalchemy/orm/__pycache__/path_registry.cpython-37.pyc
../sqlalchemy/orm/__pycache__/persistence.cpython-37.pyc
../sqlalchemy/orm/__pycache__/properties.cpython-37.pyc
../sqlalchemy/orm/__pycache__/query.cpython-37.pyc
../sqlalchemy/orm/__pycache__/relationships.cpython-37.pyc
../sqlalchemy/orm/__pycache__/scoping.cpython-37.pyc
../sqlalchemy/orm/__pycache__/session.cpython-37.pyc
../sqlalchemy/orm/__pycache__/state.cpython-37.pyc
../sqlalchemy/orm/__pycache__/strategies.cpython-37.pyc
../sqlalchemy/orm/__pycache__/strategy_options.cpython-37.pyc
../sqlalchemy/orm/__pycache__/sync.cpython-37.pyc
../sqlalchemy/orm/__pycache__/unitofwork.cpython-37.pyc
../sqlalchemy/orm/__pycache__/util.cpython-37.pyc
../sqlalchemy/orm/attributes.py
../sqlalchemy/orm/base.py
../sqlalchemy/orm/collections.py
../sqlalchemy/orm/dependency.py
../sqlalchemy/orm/deprecated_interfaces.py
../sqlalchemy/orm/descriptor_props.py
../sqlalchemy/orm/dynamic.py
../sqlalchemy/orm/evaluator.py
../sqlalchemy/orm/events.py
../sqlalchemy/orm/exc.py
../sqlalchemy/orm/identity.py
../sqlalchemy/orm/instrumentation.py
../sqlalchemy/orm/interfaces.py
../sqlalchemy/orm/loading.py
../sqlalchemy/orm/mapper.py
../sqlalchemy/orm/path_registry.py
../sqlalchemy/orm/persistence.py
../sqlalchemy/orm/properties.py
../sqlalchemy/orm/query.py
../sqlalchemy/orm/relationships.py
../sqlalchemy/orm/scoping.py
../sqlalchemy/orm/session.py
../sqlalchemy/orm/state.py
../sqlalchemy/orm/strategies.py
../sqlalchemy/orm/strategy_options.py
../sqlalchemy/orm/sync.py
../sqlalchemy/orm/unitofwork.py
../sqlalchemy/orm/util.py
../sqlalchemy/pool/__init__.py
../sqlalchemy/pool/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/pool/__pycache__/base.cpython-37.pyc
../sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-37.pyc
../sqlalchemy/pool/__pycache__/impl.cpython-37.pyc
../sqlalchemy/pool/base.py
../sqlalchemy/pool/dbapi_proxy.py
../sqlalchemy/pool/impl.py
../sqlalchemy/processors.py
../sqlalchemy/schema.py
../sqlalchemy/sql/__init__.py
../sqlalchemy/sql/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/sql/__pycache__/annotation.cpython-37.pyc
../sqlalchemy/sql/__pycache__/base.cpython-37.pyc
../sqlalchemy/sql/__pycache__/compiler.cpython-37.pyc
../sqlalchemy/sql/__pycache__/crud.cpython-37.pyc
../sqlalchemy/sql/__pycache__/ddl.cpython-37.pyc
../sqlalchemy/sql/__pycache__/default_comparator.cpython-37.pyc
../sqlalchemy/sql/__pycache__/dml.cpython-37.pyc
../sqlalchemy/sql/__pycache__/elements.cpython-37.pyc
../sqlalchemy/sql/__pycache__/expression.cpython-37.pyc
../sqlalchemy/sql/__pycache__/functions.cpython-37.pyc
../sqlalchemy/sql/__pycache__/naming.cpython-37.pyc
../sqlalchemy/sql/__pycache__/operators.cpython-37.pyc
../sqlalchemy/sql/__pycache__/schema.cpython-37.pyc
../sqlalchemy/sql/__pycache__/selectable.cpython-37.pyc
../sqlalchemy/sql/__pycache__/sqltypes.cpython-37.pyc
../sqlalchemy/sql/__pycache__/type_api.cpython-37.pyc
../sqlalchemy/sql/__pycache__/util.cpython-37.pyc
../sqlalchemy/sql/__pycache__/visitors.cpython-37.pyc
../sqlalchemy/sql/annotation.py
../sqlalchemy/sql/base.py
../sqlalchemy/sql/compiler.py
../sqlalchemy/sql/crud.py
../sqlalchemy/sql/ddl.py
../sqlalchemy/sql/default_comparator.py
../sqlalchemy/sql/dml.py
../sqlalchemy/sql/elements.py
../sqlalchemy/sql/expression.py
../sqlalchemy/sql/functions.py
../sqlalchemy/sql/naming.py
../sqlalchemy/sql/operators.py
../sqlalchemy/sql/schema.py
../sqlalchemy/sql/selectable.py
../sqlalchemy/sql/sqltypes.py
../sqlalchemy/sql/type_api.py
../sqlalchemy/sql/util.py
../sqlalchemy/sql/visitors.py
../sqlalchemy/testing/__init__.py
../sqlalchemy/testing/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/testing/__pycache__/assertions.cpython-37.pyc
../sqlalchemy/testing/__pycache__/assertsql.cpython-37.pyc
../sqlalchemy/testing/__pycache__/config.cpython-37.pyc
../sqlalchemy/testing/__pycache__/engines.cpython-37.pyc
../sqlalchemy/testing/__pycache__/entities.cpython-37.pyc
../sqlalchemy/testing/__pycache__/exclusions.cpython-37.pyc
../sqlalchemy/testing/__pycache__/fixtures.cpython-37.pyc
../sqlalchemy/testing/__pycache__/mock.cpython-37.pyc
../sqlalchemy/testing/__pycache__/pickleable.cpython-37.pyc
../sqlalchemy/testing/__pycache__/profiling.cpython-37.pyc
../sqlalchemy/testing/__pycache__/provision.cpython-37.pyc
../sqlalchemy/testing/__pycache__/replay_fixture.cpython-37.pyc
../sqlalchemy/testing/__pycache__/requirements.cpython-37.pyc
../sqlalchemy/testing/__pycache__/schema.cpython-37.pyc
../sqlalchemy/testing/__pycache__/util.cpython-37.pyc
../sqlalchemy/testing/__pycache__/warnings.cpython-37.pyc
../sqlalchemy/testing/assertions.py
../sqlalchemy/testing/assertsql.py
../sqlalchemy/testing/config.py
../sqlalchemy/testing/engines.py
../sqlalchemy/testing/entities.py
../sqlalchemy/testing/exclusions.py
../sqlalchemy/testing/fixtures.py
../sqlalchemy/testing/mock.py
../sqlalchemy/testing/pickleable.py
../sqlalchemy/testing/plugin/__init__.py
../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-37.pyc
../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-37.pyc
../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-37.pyc
../sqlalchemy/testing/plugin/bootstrap.py
../sqlalchemy/testing/plugin/plugin_base.py
../sqlalchemy/testing/plugin/pytestplugin.py
../sqlalchemy/testing/profiling.py
../sqlalchemy/testing/provision.py
../sqlalchemy/testing/replay_fixture.py
../sqlalchemy/testing/requirements.py
../sqlalchemy/testing/schema.py
../sqlalchemy/testing/suite/__init__.py
../sqlalchemy/testing/suite/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_cte.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_results.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_select.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_types.cpython-37.pyc
../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-37.pyc
../sqlalchemy/testing/suite/test_cte.py
../sqlalchemy/testing/suite/test_ddl.py
../sqlalchemy/testing/suite/test_dialect.py
../sqlalchemy/testing/suite/test_insert.py
../sqlalchemy/testing/suite/test_reflection.py
../sqlalchemy/testing/suite/test_results.py
../sqlalchemy/testing/suite/test_select.py
../sqlalchemy/testing/suite/test_sequence.py
../sqlalchemy/testing/suite/test_types.py
../sqlalchemy/testing/suite/test_update_delete.py
../sqlalchemy/testing/util.py
../sqlalchemy/testing/warnings.py
../sqlalchemy/types.py
../sqlalchemy/util/__init__.py
../sqlalchemy/util/__pycache__/__init__.cpython-37.pyc
../sqlalchemy/util/__pycache__/_collections.cpython-37.pyc
../sqlalchemy/util/__pycache__/compat.cpython-37.pyc
../sqlalchemy/util/__pycache__/deprecations.cpython-37.pyc
../sqlalchemy/util/__pycache__/langhelpers.cpython-37.pyc
../sqlalchemy/util/__pycache__/queue.cpython-37.pyc
../sqlalchemy/util/__pycache__/topological.cpython-37.pyc
../sqlalchemy/util/_collections.py
../sqlalchemy/util/compat.py
../sqlalchemy/util/deprecations.py
../sqlalchemy/util/langhelpers.py
../sqlalchemy/util/queue.py
../sqlalchemy/util/topological.py
PKG-INFO
SOURCES.txt
dependency_links.txt
requires.txt
top_level.txt


@ -0,0 +1,30 @@
[mssql]
pyodbc
[mssql_pymssql]
pymssql
[mssql_pyodbc]
pyodbc
[mysql]
mysqlclient
[oracle]
cx_oracle
[postgresql]
psycopg2
[postgresql_pg8000]
pg8000
[postgresql_psycopg2binary]
psycopg2-binary
[postgresql_psycopg2cffi]
psycopg2cffi
[pymysql]
pymysql


@ -0,0 +1 @@
pip


@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -0,0 +1,433 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.5.4
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5.3
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<4.0,>=2.0)
Requires-Dist: multidict (<5.0,>=4.0)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<2.0,>=1.0)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'
==================================
Async http client/server framework
==================================
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

|

.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
   :target: https://travis-ci.com/aio-libs/aiohttp
   :align: right
   :alt: Travis status for master branch

.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
   :target: https://ci.appveyor.com/project/aio-libs/aiohttp
   :align: right
   :alt: AppVeyor status for master branch

.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiohttp
   :alt: codecov.io status for master branch

.. image:: https://badge.fury.io/py/aiohttp.svg
   :target: https://pypi.org/project/aiohttp
   :alt: Latest PyPI package version

.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter
Key Features
============
- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
- Provides Web-server with middlewares and pluggable routing.
Getting started
===============
Client
------
To get something from the web:
.. code-block:: python

    import aiohttp
    import asyncio

    async def fetch(session, url):
        async with session.get(url) as response:
            return await response.text()

    async def main():
        async with aiohttp.ClientSession() as session:
            html = await fetch(session, 'http://python.org')
            print(html)

    if __name__ == '__main__':
        loop = asyncio.get_event_loop()
        loop.run_until_complete(main())
Server
------
An example using a simple server:
.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws

    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    web.run_app(app)
Documentation
=============
https://aiohttp.readthedocs.io/
Demos
=====
https://github.com/aio-libs/aiohttp-demos
External links
==============
* `Third party libraries
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
Feel free to post your questions and ideas here.
*gitter chat* https://gitter.im/aio-libs/Lobby
We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add *aiohttp* tag to your question there.
Requirements
============
- Python >= 3.5.3
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_
Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for the sake of speed).
.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
``aiohttp`` is offered under the Apache 2 license.
Keepsafe
========
The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.
Source code
===========
The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp
Benchmarks
==========
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
=========
Changelog
=========
..
You should *NOT* be adding new change log entries to this file, this
file is managed by towncrier. You *may* edit previous change logs to
fix problems like typo corrections or such.
To add a new change log entry, please see
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
we named the news folder "changes".
WARNING: Don't drop the next directive!
.. towncrier release notes start
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()``, which used to return only
  partial content in the case of compressed content.
  `#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of ``access_log=True`` and the event loop being in debug mode.
`#3504 <https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work with files asynchronously.
I/O based payloads from ``payload.py`` uses a ``ThreadPoolExecutor`` to work with I/O objects asynchronously.
`#3313 <https://github.com/aio-libs/aiohttp/issues/3313>`_
- Render Internal Server Errors in plain text if the browser does not support HTML.
  `#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts headers on write.
  Refactor how ``Payload.headers`` are handled: Payload instances now always
  have headers and a Content-Type defined.
  Fix the Payload Content-Disposition header being reset after initial creation.
  `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers,
  which allows Python 3.7 context vars to be used smoothly.
  `#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed data
`#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to avoid a deprecation warning.
`#3480 <https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from `Optional[float]` to `Optional[int]`.
`#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancelling pending activities on ``web.run_app`` finalization.
  `#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387 <https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809 <https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request``
can now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174 <https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177 <https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression to run in a background executor to avoid blocking the main thread and potentially triggering health check failures. (`#3205 <https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set `ClientTimeout` in `aiohttp.request` (`#3213 <https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc`` file
doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add default logging handler to web.run_app
If the `Application.debug` flag is set and the default logger `aiohttp.access` is used, access logs will now be output using a `stderr` `StreamHandler` if no handlers are attached. Furthermore, if the default logger has no log level set, the log level will be set to `DEBUG`. (`#3324 <https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add ``method`` argument to ``session.ws_connect()``.
  Sometimes a server API requires a different HTTP method to establish the WebSocket connection;
  for example, ``Docker exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
  A brief sketch of this and of the ``ClientTimeout`` feature above follows after this list.
- Create a task per request handling. (`#3406 <https://github.com/aio-libs/aiohttp/pull/3406>`_)
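To illustrate two of the features above, here is a minimal, non-authoritative sketch.
It assumes the 3.5 client API described in this changelog; the WebSocket URL shown in the
comment is a hypothetical placeholder.

.. code-block:: python

    import asyncio
    import aiohttp

    async def main():
        # ClientTimeout (new in 3.5.0) groups the client timeout settings in one object.
        timeout = aiohttp.ClientTimeout(total=30)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get('http://python.org') as resp:
                print(resp.status)
            # The new ``method`` argument of ws_connect() (#3378) would be used like this
            # against a server whose WebSocket handshake requires POST
            # (placeholder URL, not taken from the changelog):
            #
            #     async with session.ws_connect('http://example.invalid/ws',
            #                                   method='POST') as ws:
            #         await ws.send_str('ping')

    if __name__ == '__main__':
        asyncio.get_event_loop().run_until_complete(main())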
Bugfixes
--------
- Enable passing `access_log_class` via `handler_args` (`#3158 <https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186 <https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233 <https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235 <https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239 <https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258 <https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265 <https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the collections module will not be supported anymore. (`#3273 <https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by `normalize_path_middleware`. (`#3278 <https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290 <https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304 <https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308 <https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function (`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364 <https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152 <https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229 <https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278 <https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260 <https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318 <https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331 <https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop`` properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381 <https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385 <https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await connector.close()`` instead. (`#3417 <https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession`` constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
Misc
----
- #3341, #3351


@ -0,0 +1,124 @@
aiohttp-3.5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp-3.5.4.dist-info/LICENSE.txt,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
aiohttp-3.5.4.dist-info/METADATA,sha256=vpBjLbRZ9Tbi4DEj6aDUlGbj-HJPHa8Wihktdh4Z9U0,16950
aiohttp-3.5.4.dist-info/RECORD,,
aiohttp-3.5.4.dist-info/WHEEL,sha256=0XRAUr92PGDyTl_2nDyFZS1y0mC0Tb6FBjKSU09tHPA,109
aiohttp-3.5.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp/__init__.py,sha256=GdkiBfeUQa38wScKQYLH4mRL-20MqARHC0ljV6Naf8w,4948
aiohttp/__pycache__/__init__.cpython-37.pyc,,
aiohttp/__pycache__/abc.cpython-37.pyc,,
aiohttp/__pycache__/base_protocol.cpython-37.pyc,,
aiohttp/__pycache__/client.cpython-37.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/client_proto.cpython-37.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-37.pyc,,
aiohttp/__pycache__/client_ws.cpython-37.pyc,,
aiohttp/__pycache__/connector.cpython-37.pyc,,
aiohttp/__pycache__/cookiejar.cpython-37.pyc,,
aiohttp/__pycache__/formdata.cpython-37.pyc,,
aiohttp/__pycache__/frozenlist.cpython-37.pyc,,
aiohttp/__pycache__/hdrs.cpython-37.pyc,,
aiohttp/__pycache__/helpers.cpython-37.pyc,,
aiohttp/__pycache__/http.cpython-37.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/http_parser.cpython-37.pyc,,
aiohttp/__pycache__/http_websocket.cpython-37.pyc,,
aiohttp/__pycache__/http_writer.cpython-37.pyc,,
aiohttp/__pycache__/locks.cpython-37.pyc,,
aiohttp/__pycache__/log.cpython-37.pyc,,
aiohttp/__pycache__/multipart.cpython-37.pyc,,
aiohttp/__pycache__/payload.cpython-37.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-37.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-37.pyc,,
aiohttp/__pycache__/resolver.cpython-37.pyc,,
aiohttp/__pycache__/signals.cpython-37.pyc,,
aiohttp/__pycache__/streams.cpython-37.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-37.pyc,,
aiohttp/__pycache__/test_utils.cpython-37.pyc,,
aiohttp/__pycache__/tracing.cpython-37.pyc,,
aiohttp/__pycache__/typedefs.cpython-37.pyc,,
aiohttp/__pycache__/web.cpython-37.pyc,,
aiohttp/__pycache__/web_app.cpython-37.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-37.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-37.pyc,,
aiohttp/__pycache__/web_log.cpython-37.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-37.pyc,,
aiohttp/__pycache__/web_protocol.cpython-37.pyc,,
aiohttp/__pycache__/web_request.cpython-37.pyc,,
aiohttp/__pycache__/web_response.cpython-37.pyc,,
aiohttp/__pycache__/web_routedef.cpython-37.pyc,,
aiohttp/__pycache__/web_runner.cpython-37.pyc,,
aiohttp/__pycache__/web_server.cpython-37.pyc,,
aiohttp/__pycache__/web_urldispatcher.cpython-37.pyc,,
aiohttp/__pycache__/web_ws.cpython-37.pyc,,
aiohttp/__pycache__/worker.cpython-37.pyc,,
aiohttp/_cparser.pxd,sha256=tgw30SL6kQSczzGMlMhx2Cuhf_O8P8ZPimVCb85xILc,3959
aiohttp/_find_header.c,sha256=lWc5w3UZiVd3ni60DuFDSSPzsaQUhAQcERDGBOqeML8,189932
aiohttp/_find_header.h,sha256=5oOgQ85nF6V7rpU8NhyE5vyGkTo1Cgf1GIYrtxSTzQI,170
aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68
aiohttp/_frozenlist.c,sha256=y98wE-b4rxP9GCcT-WCx5Pt_WU3RBrs-w-QeKZ7XE34,287338
aiohttp/_frozenlist.cpython-37m-x86_64-linux-gnu.so,sha256=N43hZAJdoc0SQZnsQ72IIh3ODDDEZR8dcHArr6Wb0G4,310748
aiohttp/_frozenlist.pyx,sha256=BD8LcERExsWdo4qzuuQ84f-L_pHVzkUQO0lEAOe3Fog,2605
aiohttp/_headers.pxi,sha256=XgJL5FQRwL4uZQfegYShPclsErUlvG_xuMHs7dp_2-o,2027
aiohttp/_helpers.c,sha256=QhGjJ2v5NuDgkX23Bmcs7HpowupE1tz1im8PuZdISbI,207048
aiohttp/_helpers.cpython-37m-x86_64-linux-gnu.so,sha256=lobzEWAei1Lkxx1SFiFVHXm4eNxgIDJi03yEUfzZ3RM,213821
aiohttp/_helpers.pyi,sha256=mJRb5YdG8DxYbPfVddGRGmi93qqaJM30L1qFpgSKQuA,204
aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049
aiohttp/_http_parser.c,sha256=WIyFvlOUcAmTRpBPeFl57lbj9hkQfuMWNTPDzOku-cc,994097
aiohttp/_http_parser.cpython-37m-x86_64-linux-gnu.so,sha256=eRzfo0GowqCsVVxG_7s2qOmR4XiMUE1jKtqS5JMpqq8,1558531
aiohttp/_http_parser.pyx,sha256=qAeXR88_UXU2ontoLIq7hg7M2KHjY982iJeH_u7aXXs,28672
aiohttp/_http_writer.c,sha256=YyFijS8A3erHfOa8hb95PHfnZJnxEnLdHHkVME0o8bE,205820
aiohttp/_http_writer.cpython-37m-x86_64-linux-gnu.so,sha256=CqASRWdGlkowylQgw8FMYyGT5BL48qc_zhRAxRcFhzo,204055
aiohttp/_http_writer.pyx,sha256=vnanyXytNqyi6oqxELg5ARJ8LhtB8mDGxNfz6DdvH6E,4193
aiohttp/_websocket.c,sha256=uMC3H3T9yqN7fsOcYC0gLNfcrk3QkV199WLaHoshW0U,135134
aiohttp/_websocket.cpython-37m-x86_64-linux-gnu.so,sha256=LdzRUpovLfMOVKb6IezQvaZFTMvT7X8awC2bz3MZKt8,104661
aiohttp/_websocket.pyx,sha256=tJfygcVwKF_Xb6Pg48a6t50YO2xY4Rg0Wj7LcJJMi-U,1559
aiohttp/abc.py,sha256=lsf2bz-9KtqLhtI-e-tmgp3ynziMypYyEHvwOnFg7lQ,5392
aiohttp/base_protocol.py,sha256=kv6AbDw8ZQOyB9Hm2bOaPZyXcAbUUwFOO2lbAmArpfw,2644
aiohttp/client.py,sha256=hXh0WgGqhl80gVDlkuzgrHVaCxxkg_A9_mrhOkdSb-s,42549
aiohttp/client_exceptions.py,sha256=3e7SWwDXDhUO5npOhwgdL6K8tXMTdVyv58rjQboY4Yo,7547
aiohttp/client_proto.py,sha256=l1bLzhVx8hHOuS8nBPH6wNU15S-P6z_OMtpx_tPRi54,8001
aiohttp/client_reqrep.py,sha256=LUhjuCGyJs55LcH_Sr3AMcAhS1XlcCPM73rc8C3_GV0,35793
aiohttp/client_ws.py,sha256=AQlj-peBA0mGyra1t38sWlfV28MEM0SAATRXp1TsF9I,10694
aiohttp/connector.py,sha256=AORmJFz8WLuAjca5O582FKCC74f6emuXdZfhWzvPpx4,39556
aiohttp/cookiejar.py,sha256=ghkcBC9JhqKFz3InpJ4l2_stXLVv6qORX1303vepQUI,11268
aiohttp/formdata.py,sha256=VZCo9kmDb50lQUcRMDfAH3d5lnRxBq_AX38ge8vFI00,5807
aiohttp/frozenlist.py,sha256=I4zR368wRHXp402Z3f5lhd5i48b6A66MhHncW1JGkb4,1781
aiohttp/frozenlist.pyi,sha256=fkQEKqDR6nOjXDx2cXvfCcetoMQQdzjXs2uoA7uVaP4,1431
aiohttp/hdrs.py,sha256=iaXnHXOR_Dx0rvVkvmIZhc-7Egf2ByuSDI9tqskS0kQ,3449
aiohttp/helpers.py,sha256=q_AZMU7hOJBvtTklhQpwa1DTH3uR5h2ZA0vLlsVGSQs,22633
aiohttp/http.py,sha256=mYXbwDI8bF9D1RShF0EGtVTx7OgIyksbmKR4b_4RgBo,1385
aiohttp/http_exceptions.py,sha256=yb2XryY_kktgiADcYn1nS0Dm-RVhhy0J6R0qfg-JyWo,2358
aiohttp/http_parser.py,sha256=v9csKsBv-rmOir1ikRBcDJDAaPMsFen1HoP8_Viz6xE,27912
aiohttp/http_websocket.py,sha256=GpysCWVOOQyRzvLSq0IHhVG0goWSnv5Rmwf91uUwowI,24594
aiohttp/http_writer.py,sha256=XhGCqy_lzdLyxIzjQ_ufPFfJKTTWx1sb6YZWvrOFUPA,5239
aiohttp/locks.py,sha256=l-cW8wUbIkHaovghT7gpY8Yp5Vlo-u2G7_CR5xQqEQ8,1234
aiohttp/log.py,sha256=kOWU6EcyBQESISm27vc8dVEz_h9zxozLa5WCya1RzhQ,325
aiohttp/multipart.py,sha256=h76ZKaEdP2moxWK0qNydR7zYMgGMoyqkkRssTmrtx1A,32277
aiohttp/payload.py,sha256=QjzdcLJ89GGqFSN_SdMgEvw_Id4UEXZ9mL_2fAGF4gk,14027
aiohttp/payload_streamer.py,sha256=ZNWaWwAxOIricwfjH4-YrkCqehowVizM6fJ_JVDR480,2103
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
aiohttp/pytest_plugin.py,sha256=8KOUt8KXu_3NkPQ8DYwgqKfdAvVZ--zHnm0EQiKFPkI,10332
aiohttp/resolver.py,sha256=pRF91jOjTNuCll5TMRjTe1OxnGZK4wjAggYLgvzXkGQ,3626
aiohttp/signals.py,sha256=_ge2XQXBDWHoyCI4E-nXC-sOEJGVrJm0zYGHH0E5woQ,948
aiohttp/signals.pyi,sha256=mrEA9Ve08W22L_yI8_F7PkdQUjid_VsL3o9tcC0Ud0E,325
aiohttp/streams.py,sha256=i1Q7_RzolpEQ63AkalkeeSHsMPOaHAfjnwlxvRmYi-k,20371
aiohttp/tcp_helpers.py,sha256=1WVYM2C-HZQpgcksTyadRsl2_WeuXh_ECUxCcwji5d8,1631
aiohttp/test_utils.py,sha256=0a0034sQM72grdRxjTnYpHtkUvMwstshfc9jVPXsZ1U,20525
aiohttp/tracing.py,sha256=yfOJWzRQgRdDcdjsDLqPul3anYyVFhztDeyoM01oIq8,12662
aiohttp/typedefs.py,sha256=6HXEWJNZGUuNewFQUjSkCzKP8rQVZSKqfdNnIgofZWs,1259
aiohttp/web.py,sha256=2edP5uK2BU6wTXAWzGp2lgYq_CyU3vzLaQa0I_Ehg_0,15121
aiohttp/web_app.py,sha256=vKuHVhH9d-Qg5Pg1A8MbaZPeJttkSsghpuo2JYvUJks,17212
aiohttp/web_exceptions.py,sha256=-CQI325lMa9W-1WeJ2RlHApOOQ74ctHd6OyeKG_EyT4,10079
aiohttp/web_fileresponse.py,sha256=0Oln1kTqD80EhftG2jqVbsuSLr0Gbjpuk4T3D06fFjk,12712
aiohttp/web_log.py,sha256=J33FXqV36hWcyk8YfFNXDj3SI40uoOQzEX2Fhni7bzc,8269
aiohttp/web_middlewares.py,sha256=BY05dLo9rsRZttRmjDUHEokiHQLzW_ffENZL9q-Grf4,4188
aiohttp/web_protocol.py,sha256=q0zEVHMSLdmUw_KdI6zVeOj_k3lLZWMj4PJHo8h9c54,21394
aiohttp/web_request.py,sha256=M8ARRuEso-V7G675-xWY-lqLBGDmBVRGPujaufKZGuo,25234
aiohttp/web_response.py,sha256=nmldFBqLLaCECoaYUw54-2BVHB6Xz6XgGMK0O5ymrjo,25511
aiohttp/web_routedef.py,sha256=jQ8Y0hDHYuMBTtsuo17qjkQLBMoacbkh4zaUdwSJJ8s,6077
aiohttp/web_runner.py,sha256=_LUDpAc6vDOWfNJ-DBj3NZPtID0gBPH6JeMXtGSt4OU,10088
aiohttp/web_server.py,sha256=527MjryEIqWArFHMJlEABg3TcZgYtyJIFHY19Yvf3AI,2165
aiohttp/web_urldispatcher.py,sha256=x-O0Tqxn6xqMdQ5Qrg0hxIli-DbOfxLEDpgX_j_FGQU,38788
aiohttp/web_ws.py,sha256=7UpGsVFZw_YtpJOWPLeDnGmL6PtirxAkc8r-pGUQbt0,17082
aiohttp/worker.py,sha256=hekSLWLEJVrHrIrZ3dQga7Jzgtx_Cf3ZW7Zfd1J1G3A,8178


@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.32.3)
Root-Is-Purelib: false
Tag: cp37-cp37m-manylinux1_x86_64


@ -0,0 +1 @@
aiohttp


@ -0,0 +1,216 @@
__version__ = '3.5.4'
from typing import Tuple # noqa
from . import hdrs
from .client import (
BaseConnector,
ClientConnectionError,
ClientConnectorCertificateError,
ClientConnectorError,
ClientConnectorSSLError,
ClientError,
ClientHttpProxyError,
ClientOSError,
ClientPayloadError,
ClientProxyConnectionError,
ClientResponse,
ClientRequest,
ClientResponseError,
ClientSSLError,
ClientSession,
ClientTimeout,
ClientWebSocketResponse,
ContentTypeError,
Fingerprint,
InvalidURL,
RequestInfo,
ServerConnectionError,
ServerDisconnectedError,
ServerFingerprintMismatch,
ServerTimeoutError,
TCPConnector,
UnixConnector,
WSServerHandshakeError,
request
)
from .cookiejar import CookieJar, DummyCookieJar
from .formdata import FormData
from .helpers import BasicAuth, ChainMapProxy
from .http import (
HttpVersion,
HttpVersion10,
HttpVersion11,
WSMsgType,
WSCloseCode,
WSMessage,
WebSocketError
)
from .multipart import (
BadContentDispositionHeader,
BadContentDispositionParam,
BodyPartReader,
MultipartReader,
MultipartWriter,
content_disposition_filename,
parse_content_disposition
)
from .payload import (
AsyncIterablePayload,
BufferedReaderPayload,
BytesIOPayload,
BytesPayload,
IOBasePayload,
JsonPayload,
PAYLOAD_REGISTRY,
Payload,
StringIOPayload,
StringPayload,
TextIOPayload,
get_payload,
payload_type
)
from .payload_streamer import streamer
from .resolver import AsyncResolver, DefaultResolver, ThreadedResolver
from .signals import Signal
from .streams import (
DataQueue,
EMPTY_PAYLOAD,
EofStream,
FlowControlDataQueue,
StreamReader
)
from .tracing import (
TraceConfig,
TraceConnectionCreateEndParams,
TraceConnectionCreateStartParams,
TraceConnectionQueuedEndParams,
TraceConnectionQueuedStartParams,
TraceConnectionReuseconnParams,
TraceDnsCacheHitParams,
TraceDnsCacheMissParams,
TraceDnsResolveHostEndParams,
TraceDnsResolveHostStartParams,
TraceRequestChunkSentParams,
TraceRequestEndParams,
TraceRequestExceptionParams,
TraceRequestRedirectParams,
TraceRequestStartParams,
TraceResponseChunkReceivedParams
)
__all__ = (
'hdrs',
# client
'BaseConnector',
'ClientConnectionError',
'ClientConnectorCertificateError',
'ClientConnectorError',
'ClientConnectorSSLError',
'ClientError',
'ClientHttpProxyError',
'ClientOSError',
'ClientPayloadError',
'ClientProxyConnectionError',
'ClientResponse',
'ClientRequest',
'ClientResponseError',
'ClientSSLError',
'ClientSession',
'ClientTimeout',
'ClientWebSocketResponse',
'ContentTypeError',
'Fingerprint',
'InvalidURL',
'RequestInfo',
'ServerConnectionError',
'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ServerTimeoutError',
'TCPConnector',
'UnixConnector',
'WSServerHandshakeError',
'request',
# cookiejar
'CookieJar',
'DummyCookieJar',
# formdata
'FormData',
# helpers
'BasicAuth',
'ChainMapProxy',
# http
'HttpVersion',
'HttpVersion10',
'HttpVersion11',
'WSMsgType',
'WSCloseCode',
'WSMessage',
'WebSocketError',
# multipart
'BadContentDispositionHeader',
'BadContentDispositionParam',
'BodyPartReader',
'MultipartReader',
'MultipartWriter',
'content_disposition_filename',
'parse_content_disposition',
# payload
'AsyncIterablePayload',
'BufferedReaderPayload',
'BytesIOPayload',
'BytesPayload',
'IOBasePayload',
'JsonPayload',
'PAYLOAD_REGISTRY',
'Payload',
'StringIOPayload',
'StringPayload',
'TextIOPayload',
'get_payload',
'payload_type',
# payload_streamer
'streamer',
# resolver
'AsyncResolver',
'DefaultResolver',
'ThreadedResolver',
# signals
'Signal',
'DataQueue',
'EMPTY_PAYLOAD',
'EofStream',
'FlowControlDataQueue',
'StreamReader',
# tracing
'TraceConfig',
'TraceConnectionCreateEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionReuseconnParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceDnsResolveHostEndParams',
'TraceDnsResolveHostStartParams',
'TraceRequestChunkSentParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceRequestRedirectParams',
'TraceRequestStartParams',
'TraceResponseChunkReceivedParams',
) # type: Tuple[str, ...]
try:
    from .worker import GunicornWebWorker, GunicornUVLoopWebWorker  # noqa
    __all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
except ImportError:  # pragma: no cover
    pass
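
The module above simply re-exports the public aiohttp API, so everything listed in
__all__ is importable from the top-level package. A small hedged sketch of two of the
re-exported helpers (FormData and BasicAuth) follows; the field name and credentials are
placeholders, not values taken from this repository:

    import aiohttp

    # Build a multipart/form-data body field by field.
    form = aiohttp.FormData()
    form.add_field('comment', 'hello')           # placeholder field

    # BasicAuth encodes credentials for the Authorization header.
    auth = aiohttp.BasicAuth('user', 'secret')   # placeholder credentials
    print(auth.encode())                         # -> 'Basic dXNlcjpzZWNyZXQ='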


@ -0,0 +1,140 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t
cdef extern from "../vendor/http-parser/http_parser.h":
ctypedef int (*http_data_cb) (http_parser*,
const char *at,
size_t length) except -1
ctypedef int (*http_cb) (http_parser*) except -1
struct http_parser:
unsigned int type
unsigned int flags
unsigned int state
unsigned int header_state
unsigned int index
uint32_t nread
uint64_t content_length
unsigned short http_major
unsigned short http_minor
unsigned int status_code
unsigned int method
unsigned int http_errno
unsigned int upgrade
void *data
struct http_parser_settings:
http_cb on_message_begin
http_data_cb on_url
http_data_cb on_status
http_data_cb on_header_field
http_data_cb on_header_value
http_cb on_headers_complete
http_data_cb on_body
http_cb on_message_complete
http_cb on_chunk_header
http_cb on_chunk_complete
enum http_parser_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
enum http_errno:
HPE_OK,
HPE_CB_message_begin,
HPE_CB_url,
HPE_CB_header_field,
HPE_CB_header_value,
HPE_CB_headers_complete,
HPE_CB_body,
HPE_CB_message_complete,
HPE_CB_status,
HPE_CB_chunk_header,
HPE_CB_chunk_complete,
HPE_INVALID_EOF_STATE,
HPE_HEADER_OVERFLOW,
HPE_CLOSED_CONNECTION,
HPE_INVALID_VERSION,
HPE_INVALID_STATUS,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_HOST,
HPE_INVALID_PORT,
HPE_INVALID_PATH,
HPE_INVALID_QUERY_STRING,
HPE_INVALID_FRAGMENT,
HPE_LF_EXPECTED,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_CONSTANT,
HPE_INVALID_INTERNAL_STATE,
HPE_STRICT,
HPE_PAUSED,
HPE_UNKNOWN
enum flags:
F_CHUNKED,
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_TRAILING,
F_UPGRADE,
F_SKIPBODY,
F_CONTENTLENGTH
enum http_method:
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
LINK, UNLINK
void http_parser_init(http_parser *parser, http_parser_type type)
size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len)
int http_should_keep_alive(const http_parser *parser)
void http_parser_settings_init(http_parser_settings *settings)
const char *http_errno_name(http_errno err)
const char *http_errno_description(http_errno err)
const char *http_method_str(http_method m)
# URL Parser
enum http_parser_url_fields:
UF_SCHEMA = 0,
UF_HOST = 1,
UF_PORT = 2,
UF_PATH = 3,
UF_QUERY = 4,
UF_FRAGMENT = 5,
UF_USERINFO = 6,
UF_MAX = 7
struct http_parser_url_field_data:
uint16_t off
uint16_t len
struct http_parser_url:
uint16_t field_set
uint16_t port
http_parser_url_field_data[<int>UF_MAX] field_data
void http_parser_url_init(http_parser_url *u)
int http_parser_parse_url(const char *buf,
size_t buflen,
int is_connect,
http_parser_url *u)

File diff suppressed because it is too large.


@ -0,0 +1,14 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H
#ifdef __cplusplus
extern "C" {
#endif
int find_header(const char *str, int size);
#ifdef __cplusplus
}
#endif
#endif


@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
    int find_header(char *, int)

File diff suppressed because it is too large.


@ -0,0 +1,108 @@
from collections.abc import MutableSequence


cdef class FrozenList:

    cdef readonly bint frozen
    cdef list _items

    def __init__(self, items=None):
        self.frozen = False
        if items is not None:
            items = list(items)
        else:
            items = []
        self._items = items

    cdef object _check_frozen(self):
        if self.frozen:
            raise RuntimeError("Cannot modify frozen list.")

    cdef inline object _fast_len(self):
        return len(self._items)

    def freeze(self):
        self.frozen = True

    def __getitem__(self, index):
        return self._items[index]

    def __setitem__(self, index, value):
        self._check_frozen()
        self._items[index] = value

    def __delitem__(self, index):
        self._check_frozen()
        del self._items[index]

    def __len__(self):
        return self._fast_len()

    def __iter__(self):
        return self._items.__iter__()

    def __reversed__(self):
        return self._items.__reversed__()

    def __richcmp__(self, other, op):
        if op == 0:  # <
            return list(self) < other
        if op == 1:  # <=
            return list(self) <= other
        if op == 2:  # ==
            return list(self) == other
        if op == 3:  # !=
            return list(self) != other
        if op == 4:  # >
            return list(self) > other
        if op == 5:  # >=
            return list(self) >= other

    def insert(self, pos, item):
        self._check_frozen()
        self._items.insert(pos, item)

    def __contains__(self, item):
        return item in self._items

    def __iadd__(self, items):
        self._check_frozen()
        self._items += list(items)
        return self

    def index(self, item):
        return self._items.index(item)

    def remove(self, item):
        self._check_frozen()
        self._items.remove(item)

    def clear(self):
        self._check_frozen()
        self._items.clear()

    def extend(self, items):
        self._check_frozen()
        self._items += list(items)

    def reverse(self):
        self._check_frozen()
        self._items.reverse()

    def pop(self, index=-1):
        self._check_frozen()
        return self._items.pop(index)

    def append(self, item):
        self._check_frozen()
        return self._items.append(item)

    def count(self, item):
        return self._items.count(item)

    def __repr__(self):
        return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
                                                      self._items)


MutableSequence.register(FrozenList)
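
A brief usage sketch of the behaviour implemented above. It assumes that the pure-Python
fallback aiohttp.frozenlist.FrozenList (listed in the RECORD file earlier) mirrors this
Cython class; the values used are illustrative only:

    from aiohttp.frozenlist import FrozenList

    fl = FrozenList([1, 2, 3])
    fl.append(4)                 # mutation is allowed while the list is not frozen
    fl.freeze()                  # flips the read-only flag
    try:
        fl.append(5)             # any further mutation raises RuntimeError
    except RuntimeError as exc:
        print(exc)               # "Cannot modify frozen list."
    print(list(fl), fl.frozen)   # [1, 2, 3, 4] True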


@ -0,0 +1,84 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changing.
from . import hdrs
cdef tuple headers = (
hdrs.ACCEPT,
hdrs.ACCEPT_CHARSET,
hdrs.ACCEPT_ENCODING,
hdrs.ACCEPT_LANGUAGE,
hdrs.ACCEPT_RANGES,
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
hdrs.ACCESS_CONTROL_MAX_AGE,
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
hdrs.AGE,
hdrs.ALLOW,
hdrs.AUTHORIZATION,
hdrs.CACHE_CONTROL,
hdrs.CONNECTION,
hdrs.CONTENT_DISPOSITION,
hdrs.CONTENT_ENCODING,
hdrs.CONTENT_LANGUAGE,
hdrs.CONTENT_LENGTH,
hdrs.CONTENT_LOCATION,
hdrs.CONTENT_MD5,
hdrs.CONTENT_RANGE,
hdrs.CONTENT_TRANSFER_ENCODING,
hdrs.CONTENT_TYPE,
hdrs.COOKIE,
hdrs.DATE,
hdrs.DESTINATION,
hdrs.DIGEST,
hdrs.ETAG,
hdrs.EXPECT,
hdrs.EXPIRES,
hdrs.FORWARDED,
hdrs.FROM,
hdrs.HOST,
hdrs.IF_MATCH,
hdrs.IF_MODIFIED_SINCE,
hdrs.IF_NONE_MATCH,
hdrs.IF_RANGE,
hdrs.IF_UNMODIFIED_SINCE,
hdrs.KEEP_ALIVE,
hdrs.LAST_EVENT_ID,
hdrs.LAST_MODIFIED,
hdrs.LINK,
hdrs.LOCATION,
hdrs.MAX_FORWARDS,
hdrs.ORIGIN,
hdrs.PRAGMA,
hdrs.PROXY_AUTHENTICATE,
hdrs.PROXY_AUTHORIZATION,
hdrs.RANGE,
hdrs.REFERER,
hdrs.RETRY_AFTER,
hdrs.SEC_WEBSOCKET_ACCEPT,
hdrs.SEC_WEBSOCKET_EXTENSIONS,
hdrs.SEC_WEBSOCKET_KEY,
hdrs.SEC_WEBSOCKET_KEY1,
hdrs.SEC_WEBSOCKET_PROTOCOL,
hdrs.SEC_WEBSOCKET_VERSION,
hdrs.SERVER,
hdrs.SET_COOKIE,
hdrs.TE,
hdrs.TRAILER,
hdrs.TRANSFER_ENCODING,
hdrs.UPGRADE,
hdrs.URI,
hdrs.USER_AGENT,
hdrs.VARY,
hdrs.VIA,
hdrs.WANT_DIGEST,
hdrs.WARNING,
hdrs.WEBSOCKET,
hdrs.WWW_AUTHENTICATE,
hdrs.X_FORWARDED_FOR,
hdrs.X_FORWARDED_HOST,
hdrs.X_FORWARDED_PROTO,
)

File diff suppressed because it is too large.


@ -0,0 +1,8 @@
from typing import Any


class reify:
    def __init__(self, wrapped: Any) -> None: ...
    def __get__(self, inst: Any, owner: Any) -> Any: ...
    def __set__(self, inst: Any, value: Any) -> None: ...


@ -0,0 +1,35 @@
cdef class reify:
    """Use as a class method decorator.  It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable.  It is, in Python parlance, a data descriptor.
    """

    cdef object wrapped
    cdef object name

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.name = wrapped.__name__

    @property
    def __doc__(self):
        return self.wrapped.__doc__

    def __get__(self, inst, owner):
        try:
            try:
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            if inst is None:
                return self
            raise

    def __set__(self, inst, value):
        raise AttributeError("reified property is read-only")
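
For reference, a minimal sketch of how a reify-style descriptor is typically used. It
assumes the pure-Python counterpart aiohttp.helpers.reify behaves like the Cython version
above (in particular, that it caches results in the instance's _cache dict, as the code
above does); the Request class and its attributes below are illustrative, not aiohttp's own:

    from aiohttp.helpers import reify

    class Request:
        def __init__(self, raw_path):
            self._cache = {}           # reify stores computed values here
            self._raw_path = raw_path

        @reify
        def path(self):
            print('computed once')
            return self._raw_path.split('?')[0]

    req = Request('/index?x=1')
    print(req.path)   # prints 'computed once', then '/index'
    print(req.path)   # served from _cache, nothing recomputed
    # req.path = '/other' would raise AttributeError("reified property is read-only")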

File diff suppressed because it is too large.


@ -0,0 +1,845 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function
from cpython.mem cimport PyMem_Malloc, PyMem_Free
from libc.string cimport memcpy
from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE,
Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize)
from multidict import (CIMultiDict as _CIMultiDict,
CIMultiDictProxy as _CIMultiDictProxy)
from yarl import URL as _URL
from aiohttp import hdrs
from .http_exceptions import (
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError,
PayloadEncodingError, ContentLengthError, TransferEncodingError)
from .http_writer import (HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD,
StreamReader as _StreamReader)
cimport cython
from aiohttp cimport _cparser as cparser
include "_headers.pxi"
from aiohttp cimport _find_header
DEF DEFAULT_FREELIST_SIZE = 250
cdef extern from "Python.h":
int PyByteArray_Resize(object, Py_ssize_t) except -1
Py_ssize_t PyByteArray_Size(object) except -1
char* PyByteArray_AsString(object)
__all__ = ('HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer
cdef inline object extend(object buf, const char* at, size_t length):
cdef Py_ssize_t s
cdef char* ptr
s = PyByteArray_Size(buf)
PyByteArray_Resize(buf, s + length)
ptr = PyByteArray_AsString(buf)
memcpy(ptr + s, at, length)
DEF METHODS_COUNT = 34;
cdef list _http_method = []
for i in range(METHODS_COUNT):
_http_method.append(
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
cdef inline str http_method_str(int i):
if i < METHODS_COUNT:
return <str>_http_method[i]
else:
return "<unknown>"
cdef inline object find_header(bytes raw_header):
cdef Py_ssize_t size
cdef char *buf
cdef int idx
PyBytes_AsStringAndSize(raw_header, &buf, &size)
idx = _find_header.find_header(buf, size)
if idx == -1:
return raw_header.decode('utf-8', 'surrogateescape')
return headers[idx]
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
cdef readonly str method
cdef readonly str path
cdef readonly object version # HttpVersion
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
cdef readonly object url # yarl.URL
def __init__(self, method, path, version, headers, raw_headers,
should_close, compression, upgrade, chunked, url):
self.method = method
self.path = path
self.version = version
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
self.url = url
def __repr__(self):
info = []
info.append(("method", self.method))
info.append(("path", self.path))
info.append(("version", self.version))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
info.append(("url", self.url))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawRequestMessage(' + sinfo + ')>'
def _replace(self, **dct):
cdef RawRequestMessage ret
ret = _new_request_message(self.method,
self.path,
self.version,
self.headers,
self.raw_headers,
self.should_close,
self.compression,
self.upgrade,
self.chunked,
self.url)
if "method" in dct:
ret.method = dct["method"]
if "path" in dct:
ret.path = dct["path"]
if "version" in dct:
ret.version = dct["version"]
if "headers" in dct:
ret.headers = dct["headers"]
if "raw_headers" in dct:
ret.raw_headers = dct["raw_headers"]
if "should_close" in dct:
ret.should_close = dct["should_close"]
if "compression" in dct:
ret.compression = dct["compression"]
if "upgrade" in dct:
ret.upgrade = dct["upgrade"]
if "chunked" in dct:
ret.chunked = dct["chunked"]
if "url" in dct:
ret.url = dct["url"]
return ret
cdef _new_request_message(str method,
str path,
object version,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked,
object url):
cdef RawRequestMessage ret
ret = RawRequestMessage.__new__(RawRequestMessage)
ret.method = method
ret.path = path
ret.version = version
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
ret.url = url
return ret
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
cdef readonly object version # HttpVersion
cdef readonly int code
cdef readonly str reason
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
def __init__(self, version, code, reason, headers, raw_headers,
should_close, compression, upgrade, chunked):
self.version = version
self.code = code
self.reason = reason
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
def __repr__(self):
info = []
info.append(("version", self.version))
info.append(("code", self.code))
info.append(("reason", self.reason))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawResponseMessage(' + sinfo + ')>'
cdef _new_response_message(object version,
int code,
str reason,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked):
cdef RawResponseMessage ret
ret = RawResponseMessage.__new__(RawResponseMessage)
ret.version = version
ret.code = code
ret.reason = reason
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
return ret
@cython.internal
cdef class HttpParser:
cdef:
cparser.http_parser* _cparser
cparser.http_parser_settings* _csettings
bytearray _raw_name
bytearray _raw_value
bint _has_value
object _protocol
object _loop
object _timer
size_t _max_line_size
size_t _max_field_size
size_t _max_headers
bint _response_with_body
bint _started
object _url
bytearray _buf
str _path
str _reason
object _headers
list _raw_headers
bint _upgraded
list _messages
object _payload
bint _payload_error
object _payload_exception
object _last_error
bint _auto_decompress
str _content_encoding
Py_buffer py_buf
def __cinit__(self):
self._cparser = <cparser.http_parser*> \
PyMem_Malloc(sizeof(cparser.http_parser))
if self._cparser is NULL:
raise MemoryError()
self._csettings = <cparser.http_parser_settings*> \
PyMem_Malloc(sizeof(cparser.http_parser_settings))
if self._csettings is NULL:
raise MemoryError()
def __dealloc__(self):
PyMem_Free(self._cparser)
PyMem_Free(self._csettings)
cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
self._cparser.data = <void*>self
self._cparser.content_length = 0
cparser.http_parser_settings_init(self._csettings)
self._protocol = protocol
self._loop = loop
self._timer = timer
self._buf = bytearray()
self._payload = None
self._payload_error = 0
self._payload_exception = payload_exception
self._messages = []
self._raw_name = bytearray()
self._raw_value = bytearray()
self._has_value = False
self._max_line_size = max_line_size
self._max_headers = max_headers
self._max_field_size = max_field_size
self._response_with_body = response_with_body
self._upgraded = False
self._auto_decompress = auto_decompress
self._content_encoding = None
self._csettings.on_url = cb_on_url
self._csettings.on_status = cb_on_status
self._csettings.on_header_field = cb_on_header_field
self._csettings.on_header_value = cb_on_header_value
self._csettings.on_headers_complete = cb_on_headers_complete
self._csettings.on_body = cb_on_body
self._csettings.on_message_begin = cb_on_message_begin
self._csettings.on_message_complete = cb_on_message_complete
self._csettings.on_chunk_header = cb_on_chunk_header
self._csettings.on_chunk_complete = cb_on_chunk_complete
self._last_error = None
cdef _process_header(self):
if self._raw_name:
raw_name = bytes(self._raw_name)
raw_value = bytes(self._raw_value)
name = find_header(raw_name)
value = raw_value.decode('utf-8', 'surrogateescape')
self._headers.add(name, value)
if name is CONTENT_ENCODING:
self._content_encoding = value
PyByteArray_Resize(self._raw_name, 0)
PyByteArray_Resize(self._raw_value, 0)
self._has_value = False
self._raw_headers.append((raw_name, raw_value))
cdef _on_header_field(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
if self._has_value:
self._process_header()
size = PyByteArray_Size(self._raw_name)
PyByteArray_Resize(self._raw_name, size + length)
buf = PyByteArray_AsString(self._raw_name)
memcpy(buf + size, at, length)
cdef _on_header_value(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
size = PyByteArray_Size(self._raw_value)
PyByteArray_Resize(self._raw_value, size + length)
buf = PyByteArray_AsString(self._raw_value)
memcpy(buf + size, at, length)
self._has_value = True
cdef _on_headers_complete(self):
self._process_header()
method = http_method_str(self._cparser.method)
should_close = not cparser.http_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED
raw_headers = tuple(self._raw_headers)
headers = CIMultiDictProxy(self._headers)
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
self._upgraded = True
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
encoding = None
enc = self._content_encoding
if enc is not None:
self._content_encoding = None
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
encoding = enc
if self._cparser.type == cparser.HTTP_REQUEST:
msg = _new_request_message(
method, self._path,
self.http_version(), headers, raw_headers,
should_close, encoding, upgrade, chunked, self._url)
else:
msg = _new_response_message(
self.http_version(), self._cparser.status_code, self._reason,
headers, raw_headers, should_close, encoding,
upgrade, chunked)
if (self._cparser.content_length > 0 or chunked or
self._cparser.method == 5): # CONNECT: 5
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop)
else:
payload = EMPTY_PAYLOAD
self._payload = payload
if encoding is not None and self._auto_decompress:
self._payload = DeflateBuffer(payload, encoding)
if not self._response_with_body:
payload = EMPTY_PAYLOAD
self._messages.append((msg, payload))
cdef _on_message_complete(self):
self._payload.feed_eof()
self._payload = None
cdef _on_chunk_header(self):
self._payload.begin_http_chunk_receiving()
cdef _on_chunk_complete(self):
self._payload.end_http_chunk_receiving()
cdef object _on_status_complete(self):
pass
cdef inline http_version(self):
cdef cparser.http_parser* parser = self._cparser
if parser.http_major == 1:
if parser.http_minor == 0:
return HttpVersion10
elif parser.http_minor == 1:
return HttpVersion11
return HttpVersion(parser.http_major, parser.http_minor)
### Public API ###
def feed_eof(self):
cdef bytes desc
if self._payload is not None:
if self._cparser.flags & cparser.F_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
elif self._cparser.http_errno != cparser.HPE_OK:
desc = cparser.http_errno_description(
<cparser.http_errno> self._cparser.http_errno)
raise PayloadEncodingError(desc.decode('latin-1'))
else:
self._payload.feed_eof()
elif self._started:
self._on_headers_complete()
if self._messages:
return self._messages[-1][0]
def feed_data(self, data):
cdef:
size_t data_len
size_t nb
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
data_len = <size_t>self.py_buf.len
nb = cparser.http_parser_execute(
self._cparser,
self._csettings,
<char*>self.py_buf.buf,
data_len)
PyBuffer_Release(&self.py_buf)
        # cparser.HPE_INVALID_METHOD is not treated as fatal here: the parser
        # seems to report it even for valid requests, e.g.
        # test_client_functional.py::test_post_data_with_bytesio_file
if (self._cparser.http_errno != cparser.HPE_OK and
(self._cparser.http_errno != cparser.HPE_INVALID_METHOD or
self._cparser.method == 0)):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
self._last_error = None
else:
ex = parser_error_from_errno(
<cparser.http_errno> self._cparser.http_errno)
self._payload = None
raise ex
if self._messages:
messages = self._messages
self._messages = []
else:
messages = ()
if self._upgraded:
return messages, True, data[nb:]
else:
return messages, False, b''
cdef class HttpRequestParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False):
self._init(cparser.HTTP_REQUEST, protocol, loop, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body)
cdef object _on_status_complete(self):
cdef Py_buffer py_buf
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
if self._cparser.method == 5: # CONNECT
self._url = URL(self._path)
else:
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
try:
self._url = _parse_url(<char*>py_buf.buf,
py_buf.len)
finally:
PyBuffer_Release(&py_buf)
PyByteArray_Resize(self._buf, 0)
cdef class HttpResponseParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
self._init(cparser.HTTP_RESPONSE, protocol, loop, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, auto_decompress)
cdef object _on_status_complete(self):
if self._buf:
self._reason = self._buf.decode('utf-8', 'surrogateescape')
PyByteArray_Resize(self._buf, 0)
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
pyparser._started = True
pyparser._headers = CIMultiDict()
pyparser._raw_headers = []
PyByteArray_Resize(pyparser._buf, 0)
pyparser._path = None
pyparser._reason = None
return 0
cdef int cb_on_url(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_status(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef str reason
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_field(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
pyparser._on_status_complete()
size = len(pyparser._raw_name) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header name is too long', pyparser._max_field_size, size)
pyparser._on_header_field(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_value(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
size = len(pyparser._raw_value) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header value is too long', pyparser._max_field_size, size)
pyparser._on_header_value(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_status_complete()
pyparser._on_headers_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
return 2
else:
return 0
cdef int cb_on_body(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef bytes body = at[:length]
try:
pyparser._payload.feed_data(body, length)
except BaseException as exc:
if pyparser._payload_exception is not None:
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
else:
pyparser._payload.set_exception(exc)
pyparser._payload_error = 1
return -1
else:
return 0
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._started = False
pyparser._on_message_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_header()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef parser_error_from_errno(cparser.http_errno errno):
cdef bytes desc = cparser.http_errno_description(errno)
if errno in (cparser.HPE_CB_message_begin,
cparser.HPE_CB_url,
cparser.HPE_CB_header_field,
cparser.HPE_CB_header_value,
cparser.HPE_CB_headers_complete,
cparser.HPE_CB_body,
cparser.HPE_CB_message_complete,
cparser.HPE_CB_status,
cparser.HPE_CB_chunk_header,
cparser.HPE_CB_chunk_complete):
cls = BadHttpMessage
elif errno == cparser.HPE_INVALID_STATUS:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_METHOD:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_URL:
cls = InvalidURLError
else:
cls = BadHttpMessage
return cls(desc.decode('latin-1'))
def parse_url(url):
cdef:
Py_buffer py_buf
char* buf_data
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
try:
buf_data = <char*>py_buf.buf
return _parse_url(buf_data, py_buf.len)
finally:
PyBuffer_Release(&py_buf)
cdef _parse_url(char* buf_data, size_t length):
cdef:
cparser.http_parser_url* parsed
int res
str schema = None
str host = None
object port = None
str path = None
str query = None
str fragment = None
str user = None
str password = None
str userinfo = None
object result = None
int off
int ln
parsed = <cparser.http_parser_url*> \
PyMem_Malloc(sizeof(cparser.http_parser_url))
if parsed is NULL:
raise MemoryError()
cparser.http_parser_url_init(parsed)
try:
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
if res == 0:
if parsed.field_set & (1 << cparser.UF_SCHEMA):
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
schema = ''
if parsed.field_set & (1 << cparser.UF_HOST):
off = parsed.field_data[<int>cparser.UF_HOST].off
ln = parsed.field_data[<int>cparser.UF_HOST].len
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
host = ''
if parsed.field_set & (1 << cparser.UF_PORT):
port = parsed.port
if parsed.field_set & (1 << cparser.UF_PATH):
off = parsed.field_data[<int>cparser.UF_PATH].off
ln = parsed.field_data[<int>cparser.UF_PATH].len
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
path = ''
if parsed.field_set & (1 << cparser.UF_QUERY):
off = parsed.field_data[<int>cparser.UF_QUERY].off
ln = parsed.field_data[<int>cparser.UF_QUERY].len
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
query = ''
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
fragment = ''
if parsed.field_set & (1 << cparser.UF_USERINFO):
off = parsed.field_data[<int>cparser.UF_USERINFO].off
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
user, sep, password = userinfo.partition(':')
return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query=query, fragment=fragment)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
PyMem_Free(parsed)
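# Illustrative sketch: exercising the URL parser defined above. The helper is
# hypothetical and not used by the module itself; the URL is a made-up example.
def _parse_url_example():
    # parse_url() accepts a bytes-like object and returns a yarl.URL
    url = parse_url(b'http://example.com:8080/path?q=1')
    return url.host, url.port, url.path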

File diff suppressed because it is too large
@ -0,0 +1,152 @@
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.object cimport PyObject_Str
from multidict import istr
DEF BUF_SIZE = 16 * 1024 # 16KiB
cdef char BUFFER[BUF_SIZE]
cdef object _istr = istr
# ----------------- writer ---------------------------
cdef struct Writer:
char *buf
Py_ssize_t size
Py_ssize_t pos
cdef inline void _init_writer(Writer* writer):
writer.buf = &BUFFER[0]
writer.size = BUF_SIZE
writer.pos = 0
cdef inline void _release_writer(Writer* writer):
if writer.buf != BUFFER:
PyMem_Free(writer.buf)
cdef inline int _write_byte(Writer* writer, uint8_t ch):
cdef char * buf
cdef Py_ssize_t size
if writer.pos == writer.size:
# reallocate
size = writer.size + BUF_SIZE
if writer.buf == BUFFER:
buf = <char*>PyMem_Malloc(size)
if buf == NULL:
PyErr_NoMemory()
return -1
memcpy(buf, writer.buf, writer.size)
else:
buf = <char*>PyMem_Realloc(writer.buf, size)
if buf == NULL:
PyErr_NoMemory()
return -1
writer.buf = buf
writer.size = size
writer.buf[writer.pos] = <char>ch
writer.pos += 1
return 0
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
cdef uint64_t utf = <uint64_t> symbol
if utf < 0x80:
return _write_byte(writer, <uint8_t>utf)
elif utf < 0x800:
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
elif 0xD800 <= utf <= 0xDFFF:
        # surrogate code point, ignored
return 0
elif utf < 0x10000:
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
return -1
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
elif utf > 0x10FFFF:
# symbol is too large
return 0
else:
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
return -1
if _write_byte(writer,
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
return -1
if _write_byte(writer,
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
cdef inline int _write_str(Writer* writer, str s):
cdef Py_UCS4 ch
for ch in s:
if _write_utf8(writer, ch) < 0:
                return -1
    return 0
# --------------- _serialize_headers ----------------------
cdef str to_str(object s):
typ = type(s)
if typ is str:
return <str>s
elif typ is _istr:
return PyObject_Str(s)
elif not isinstance(s, str):
raise TypeError("Cannot serialize non-str key {!r}".format(s))
else:
return str(s)
def _serialize_headers(str status_line, headers):
cdef Writer writer
cdef object key
cdef object val
cdef bytes ret
_init_writer(&writer)
try:
if _write_str(&writer, status_line) < 0:
raise
if _write_byte(&writer, '\r') < 0:
raise
if _write_byte(&writer, '\n') < 0:
raise
for key, val in headers.items():
if _write_str(&writer, to_str(key)) < 0:
raise
if _write_byte(&writer, ':') < 0:
raise
if _write_byte(&writer, ' ') < 0:
raise
if _write_str(&writer, to_str(val)) < 0:
raise
if _write_byte(&writer, '\r') < 0:
raise
if _write_byte(&writer, '\n') < 0:
raise
if _write_byte(&writer, '\r') < 0:
raise
if _write_byte(&writer, '\n') < 0:
raise
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
finally:
_release_writer(&writer)
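# Illustrative sketch: what _serialize_headers() produces for a simple request.
# The helper and header values are hypothetical and exist only for documentation.
def _serialize_headers_example():
    from multidict import CIMultiDict
    raw = _serialize_headers('GET / HTTP/1.1', CIMultiDict(Host='example.com'))
    # raw == b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n'
    return raw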

File diff suppressed because it is too large
@ -0,0 +1,54 @@
from cpython cimport PyBytes_AsString
#from cpython cimport PyByteArray_AsString  # Cython still does not export that
cdef extern from "Python.h":
char* PyByteArray_AsString(bytearray ba) except NULL
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
def _websocket_mask_cython(object mask, object data):
"""Note, this function mutates its `data` argument
"""
cdef:
Py_ssize_t data_len, i
# bit operations on signed integers are implementation-specific
unsigned char * in_buf
const unsigned char * mask_buf
uint32_t uint32_msk
uint64_t uint64_msk
assert len(mask) == 4
if not isinstance(mask, bytes):
mask = bytes(mask)
if isinstance(data, bytearray):
data = <bytearray>data
else:
data = bytearray(data)
data_len = len(data)
in_buf = <unsigned char*>PyByteArray_AsString(data)
mask_buf = <const unsigned char*>PyBytes_AsString(mask)
uint32_msk = (<uint32_t*>mask_buf)[0]
    # TODO: align the in_buf pointer to achieve even faster speeds
    # (probably unnecessary in Python: malloc() already aligns to sizeof(long) bytes)
if sizeof(size_t) >= 8:
uint64_msk = uint32_msk
uint64_msk = (uint64_msk << 32) | uint32_msk
while data_len >= 8:
(<uint64_t*>in_buf)[0] ^= uint64_msk
in_buf += 8
data_len -= 8
while data_len >= 4:
(<uint32_t*>in_buf)[0] ^= uint32_msk
in_buf += 4
data_len -= 4
for i in range(0, data_len):
in_buf[i] ^= mask_buf[i]
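# Illustrative sketch: the masking routine above XORs a bytearray payload in
# place, so applying the same 4-byte mask twice restores the original data.
# The helper below is hypothetical.
def _websocket_mask_example():
    data = bytearray(b'hello')
    _websocket_mask_cython(b'\x01\x02\x03\x04', data)  # mask in place
    _websocket_mask_cython(b'\x01\x02\x03\x04', data)  # unmask
    return data == bytearray(b'hello')                 # True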

@ -0,0 +1,208 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel # noqa
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
Generator,
Iterable,
List,
Optional,
Tuple,
)
from multidict import CIMultiDict # noqa
from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
from .web_app import Application
from .web_exceptions import HTTPException
else:
BaseRequest = Request = Application = StreamResponse = None
HTTPException = None
class AbstractRouter(ABC):
def __init__(self) -> None:
self._frozen = False
def post_init(self, app: Application) -> None:
"""Post init stage.
        Not an abstract method for the sake of backward compatibility,
but if the router wants to be aware of the application
it can override this.
"""
@property
def frozen(self) -> bool:
return self._frozen
def freeze(self) -> None:
"""Freeze router."""
self._frozen = True
@abstractmethod
async def resolve(self, request: Request) -> 'AbstractMatchInfo':
"""Return MATCH_INFO for given request"""
class AbstractMatchInfo(ABC):
@property # pragma: no branch
@abstractmethod
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
"""Execute matched request handler"""
@property
@abstractmethod
def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
"""Expect handler for 100-continue processing"""
@property # pragma: no branch
@abstractmethod
def http_exception(self) -> Optional[HTTPException]:
"""HTTPException instance raised on router's resolving, or None"""
@abstractmethod # pragma: no branch
def get_info(self) -> Dict[str, Any]:
"""Return a dict with additional info useful for introspection"""
@property # pragma: no branch
@abstractmethod
def apps(self) -> Tuple[Application, ...]:
"""Stack of nested applications.
Top level application is left-most element.
"""
@abstractmethod
def add_app(self, app: Application) -> None:
"""Add application to the nested apps stack."""
@abstractmethod
def freeze(self) -> None:
"""Freeze the match info.
The method is called after route resolution.
After the call .add_app() is forbidden.
"""
class AbstractView(ABC):
"""Abstract class based view."""
def __init__(self, request: Request) -> None:
self._request = request
@property
def request(self) -> Request:
"""Request instance."""
return self._request
@abstractmethod
def __await__(self) -> Generator[Any, None, StreamResponse]:
"""Execute the view handler."""
class AbstractResolver(ABC):
"""Abstract DNS resolver."""
@abstractmethod
async def resolve(self, host: str,
port: int, family: int) -> List[Dict[str, Any]]:
"""Return IP address for given hostname"""
@abstractmethod
async def close(self) -> None:
"""Release resolver"""
if TYPE_CHECKING: # pragma: no cover
IterableBase = Iterable[Morsel[str]]
else:
IterableBase = Iterable
class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
self._loop = get_running_loop(loop)
@abstractmethod
def clear(self) -> None:
"""Clear all cookies."""
@abstractmethod
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
"""Update cookies."""
@abstractmethod
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
"""Return the jar's cookies filtered by their attributes."""
class AbstractStreamWriter(ABC):
"""Abstract stream writer."""
buffer_size = 0
output_size = 0
length = 0 # type: Optional[int]
@abstractmethod
async def write(self, chunk: bytes) -> None:
"""Write chunk into stream."""
@abstractmethod
async def write_eof(self, chunk: bytes=b'') -> None:
"""Write last chunk."""
@abstractmethod
async def drain(self) -> None:
"""Flush the write buffer."""
@abstractmethod
def enable_compression(self, encoding: str='deflate') -> None:
"""Enable HTTP body compression"""
@abstractmethod
def enable_chunking(self) -> None:
"""Enable HTTP chunked mode"""
@abstractmethod
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
"""Write HTTP headers"""
class AbstractAccessLogger(ABC):
"""Abstract writer to access log."""
def __init__(self, logger: logging.Logger, log_format: str) -> None:
self.logger = logger
self.log_format = log_format
@abstractmethod
def log(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
"""Emit log to logger."""

@ -0,0 +1,81 @@
import asyncio
from typing import Optional, cast
from .tcp_helpers import tcp_nodelay
class BaseProtocol(asyncio.Protocol):
__slots__ = ('_loop', '_paused', '_drain_waiter',
'_connection_lost', 'transport')
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop # type: asyncio.AbstractEventLoop
self._paused = False
self._drain_waiter = None # type: Optional[asyncio.Future[None]]
self._connection_lost = False
self._reading_paused = False
self.transport = None # type: Optional[asyncio.Transport]
def pause_writing(self) -> None:
assert not self._paused
self._paused = True
def resume_writing(self) -> None:
assert self._paused
self._paused = False
waiter = self._drain_waiter
if waiter is not None:
self._drain_waiter = None
if not waiter.done():
waiter.set_result(None)
def pause_reading(self) -> None:
if not self._reading_paused and self.transport is not None:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self) -> None:
if self._reading_paused and self.transport is not None:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def connection_made(self, transport: asyncio.BaseTransport) -> None:
tr = cast(asyncio.Transport, transport)
tcp_nodelay(tr, True)
self.transport = tr
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
return
waiter = self._drain_waiter
if waiter is None:
return
self._drain_waiter = None
if waiter.done():
return
if exc is None:
waiter.set_result(None)
else:
waiter.set_exception(exc)
async def _drain_helper(self) -> None:
if self._connection_lost:
raise ConnectionResetError('Connection lost')
if not self._paused:
return
waiter = self._drain_waiter
assert waiter is None or waiter.cancelled()
waiter = self._loop.create_future()
self._drain_waiter = waiter
await waiter
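# Illustrative sketch: how a writer built on BaseProtocol typically applies
# flow control. The coroutine below is hypothetical, not part of this module.
async def _write_with_flow_control(proto: BaseProtocol, chunk: bytes) -> None:
    assert proto.transport is not None
    proto.transport.write(chunk)   # a full buffer may trigger pause_writing()
    await proto._drain_helper()    # waits until resume_writing() is called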

File diff suppressed because it is too large
@ -0,0 +1,268 @@
"""HTTP related errors."""
import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
from .typedefs import _CIMultiDict
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
Fingerprint)
else:
RequestInfo = ClientResponse = ConnectionKey = None
__all__ = (
'ClientError',
'ClientConnectionError',
'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
'ClientSSLError',
'ClientConnectorSSLError', 'ClientConnectorCertificateError',
'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ClientResponseError', 'ClientHttpProxyError',
'WSServerHandshakeError', 'ContentTypeError',
'ClientPayloadError', 'InvalidURL')
class ClientError(Exception):
"""Base class for client connection errors."""
class ClientResponseError(ClientError):
"""Connection error during reading response.
request_info: instance of RequestInfo
"""
def __init__(self, request_info: RequestInfo,
history: Tuple[ClientResponse, ...], *,
code: Optional[int]=None,
status: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
self.request_info = request_info
if code is not None:
if status is not None:
raise ValueError(
"Both code and status arguments are provided; "
"code is deprecated, use status instead")
warnings.warn("code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
if status is not None:
self.status = status
elif code is not None:
self.status = code
else:
self.status = 0
self.message = message
self.headers = headers
self.history = history
super().__init__("%s, message='%s'" % (self.status, message))
@property
def code(self) -> int:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
return self.status
@code.setter
def code(self, value: int) -> None:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
self.status = value
class ContentTypeError(ClientResponseError):
"""ContentType found is not valid."""
class WSServerHandshakeError(ClientResponseError):
"""websocket server handshake error."""
class ClientHttpProxyError(ClientResponseError):
"""HTTP proxy error.
Raised in :class:`aiohttp.connector.TCPConnector` if
proxy responds with status other than ``200 OK``
on ``CONNECT`` request.
"""
class TooManyRedirects(ClientResponseError):
"""Client was redirected too many times."""
class ClientConnectionError(ClientError):
"""Base class for client socket errors."""
class ClientOSError(ClientConnectionError, OSError):
"""OSError error."""
class ClientConnectorError(ClientOSError):
"""Client connector error.
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
def __init__(self, connection_key: ConnectionKey,
os_error: OSError) -> None:
self._conn_key = connection_key
self._os_error = os_error
super().__init__(os_error.errno, os_error.strerror)
@property
def os_error(self) -> OSError:
return self._os_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
return self._conn_key.ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'
.format(self, self.strerror))
class ClientProxyConnectionError(ClientConnectorError):
"""Proxy connection error.
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
class ServerConnectionError(ClientConnectionError):
"""Server connection errors."""
class ServerDisconnectedError(ServerConnectionError):
"""Server disconnected."""
def __init__(self, message: Optional[str]=None) -> None:
self.message = message
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
"""Server timeout error."""
class ServerFingerprintMismatch(ServerConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected: bytes, got: bytes,
host: str, port: int) -> None:
self.expected = expected
self.got = got
self.host = host
self.port = port
def __repr__(self) -> str:
return '<{} expected={} got={} host={} port={}>'.format(
self.__class__.__name__, self.expected, self.got,
self.host, self.port)
class ClientPayloadError(ClientError):
"""Response payload error."""
class InvalidURL(ClientError, ValueError):
"""Invalid URL.
    URL used for fetching is malformed, e.g. it doesn't contain a host
part."""
# Derive from ValueError for backward compatibility
def __init__(self, url: Any) -> None:
# The type of url is not yarl.URL because the exception can be raised
# on URL(url) call
super().__init__(url)
@property
def url(self) -> Any:
return self.args[0]
def __repr__(self) -> str:
return '<{} {}>'.format(self.__class__.__name__, self.url)
class ClientSSLError(ClientConnectorError):
"""Base error for ssl.*Errors."""
if ssl is not None:
cert_errors = (ssl.CertificateError,)
cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
ssl_errors = (ssl.SSLError,)
ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
cert_errors = tuple()
cert_errors_bases = (ClientSSLError, ValueError,)
ssl_errors = tuple()
ssl_error_bases = (ClientSSLError,)
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
"""Response ssl error."""
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
"""Response certificate error."""
def __init__(self, connection_key:
ConnectionKey, certificate_error: Exception) -> None:
self._conn_key = connection_key
self._certificate_error = certificate_error
@property
def certificate_error(self) -> Exception:
return self._certificate_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> bool:
return self._conn_key.is_ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
'[{0.certificate_error.__class__.__name__}: '
'{0.certificate_error.args}]'.format(self))
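# Illustrative sketch: how these exceptions usually surface in client code.
# The coroutine and URL are hypothetical; aiohttp is imported lazily to avoid
# a circular import at module load time.
async def _fetch_example(url: str) -> str:
    import aiohttp
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()          # may raise ClientResponseError
                return await resp.text()
    except ClientConnectorError as exc:          # DNS/TCP/TLS connect failures
        raise RuntimeError(
            'cannot connect to {}:{}'.format(exc.host, exc.port)) from exc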

@ -0,0 +1,237 @@
import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple
from .base_protocol import BaseProtocol
from .client_exceptions import (
ClientOSError,
ClientPayloadError,
ServerDisconnectedError,
ServerTimeoutError,
)
from .helpers import BaseTimerContext
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
class ResponseHandler(BaseProtocol,
DataQueue[Tuple[RawResponseMessage, StreamReader]]):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self,
loop: asyncio.AbstractEventLoop) -> None:
BaseProtocol.__init__(self, loop=loop)
DataQueue.__init__(self, loop)
self._should_close = False
self._payload = None
self._skip_payload = False
self._payload_parser = None
self._timer = None
self._tail = b''
self._upgraded = False
self._parser = None # type: Optional[HttpResponseParser]
self._read_timeout = None # type: Optional[float]
self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle]
@property
def upgraded(self) -> bool:
return self._upgraded
@property
def should_close(self) -> bool:
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) > 0 or bool(self._tail))
def force_close(self) -> None:
self._should_close = True
def close(self) -> None:
transport = self.transport
if transport is not None:
transport.close()
self.transport = None
self._payload = None
self._drop_timeout()
def is_connected(self) -> bool:
return self.transport is not None
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._drop_timeout()
if self._payload_parser is not None:
with suppress(Exception):
self._payload_parser.feed_eof()
uncompleted = None
if self._parser is not None:
try:
uncompleted = self._parser.feed_eof()
except Exception:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError(
'Response payload is not completed'))
if not self.is_eof():
if isinstance(exc, OSError):
exc = ClientOSError(*exc.args)
if exc is None:
exc = ServerDisconnectedError(uncompleted)
            # set_exception() sets self._should_close to True as a side
            # effect, but we also set it explicitly below
self.set_exception(exc)
self._should_close = True
self._parser = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
super().connection_lost(exc)
def eof_received(self) -> None:
        # most likely we should call parser.feed_eof() here
self._drop_timeout()
def pause_reading(self) -> None:
super().pause_reading()
self._drop_timeout()
def resume_reading(self) -> None:
super().resume_reading()
self._reschedule_timeout()
def set_exception(self, exc: BaseException) -> None:
self._should_close = True
self._drop_timeout()
super().set_exception(exc)
def set_parser(self, parser: Any, payload: Any) -> None:
# TODO: actual types are:
# parser: WebSocketReader
# payload: FlowControlDataQueue
        # but they are not generic enough
# Need an ABC for both types
self._payload = payload
self._payload_parser = parser
self._drop_timeout()
if self._tail:
data, self._tail = self._tail, b''
self.data_received(data)
def set_response_params(self, *, timer: BaseTimerContext=None,
skip_payload: bool=False,
read_until_eof: bool=False,
auto_decompress: bool=True,
read_timeout: Optional[float]=None) -> None:
self._skip_payload = skip_payload
self._read_timeout = read_timeout
self._reschedule_timeout()
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
payload_exception=ClientPayloadError,
read_until_eof=read_until_eof,
auto_decompress=auto_decompress)
if self._tail:
data, self._tail = self._tail, b''
self.data_received(data)
def _drop_timeout(self) -> None:
if self._read_timeout_handle is not None:
self._read_timeout_handle.cancel()
self._read_timeout_handle = None
def _reschedule_timeout(self) -> None:
timeout = self._read_timeout
if self._read_timeout_handle is not None:
self._read_timeout_handle.cancel()
if timeout:
self._read_timeout_handle = self._loop.call_later(
timeout, self._on_read_timeout)
else:
self._read_timeout_handle = None
def _on_read_timeout(self) -> None:
exc = ServerTimeoutError("Timeout on reading data from socket")
self.set_exception(exc)
if self._payload is not None:
self._payload.set_exception(exc)
def data_received(self, data: bytes) -> None:
if not data:
return
# custom payload parser
if self._payload_parser is not None:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self._payload = None
self._payload_parser = None
if tail:
self.data_received(tail)
return
else:
if self._upgraded or self._parser is None:
# i.e. websocket connection, websocket parser is not set yet
self._tail += data
else:
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
except BaseException as exc:
if self.transport is not None:
# connection.release() could be called BEFORE
# data_received(), the transport is already
# closed in this case
self.transport.close()
# should_close is True after the call
self.set_exception(exc)
return
self._upgraded = upgraded
payload = None
for message, payload in messages:
if message.should_close:
self._should_close = True
self._payload = payload
if self._skip_payload or message.code in (204, 304):
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore # noqa
else:
self.feed_data((message, payload), 0)
if payload is not None:
# new message(s) was processed
# register timeout handler unsubscribing
# either on end-of-stream or immediately for
# EMPTY_PAYLOAD
if payload is not EMPTY_PAYLOAD:
payload.on_eof(self._drop_timeout)
else:
self._drop_timeout()
if tail:
if upgraded:
self.data_received(tail)
else:
self._tail = tail
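# Illustrative sketch: how a ResponseHandler is wired to a transport. The
# coroutine and parameters are hypothetical; the real plumbing lives in
# aiohttp.connector and aiohttp.client.
async def _open_connection_example(loop: asyncio.AbstractEventLoop,
                                   host: str, port: int) -> 'ResponseHandler':
    _, proto = await loop.create_connection(
        lambda: ResponseHandler(loop), host, port)
    proto.set_response_params(read_timeout=30.0)
    return proto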

File diff suppressed because it is too large
@ -0,0 +1,301 @@
"""WebSocket client for asyncio."""
import asyncio
from typing import Any, Optional
import async_timeout
from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
from .http import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
WebSocketError,
WSMessage,
WSMsgType,
)
from .http_websocket import WebSocketWriter # WSMessage
from .streams import EofStream, FlowControlDataQueue # noqa
from .typedefs import (
DEFAULT_JSON_DECODER,
DEFAULT_JSON_ENCODER,
JSONDecoder,
JSONEncoder,
)
class ClientWebSocketResponse:
def __init__(self,
reader: 'FlowControlDataQueue[WSMessage]',
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float]=None,
heartbeat: Optional[float]=None,
compress: int=0,
client_notakeover: bool=False) -> None:
self._response = response
self._conn = response.connection
self._writer = writer
self._reader = reader
self._protocol = protocol
self._closed = False
self._closing = False
self._close_code = None # type: Optional[int]
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb = None
self._loop = loop
self._waiting = None # type: Optional[asyncio.Future[bool]]
self._exception = None # type: Optional[BaseException]
self._compress = compress
self._client_notakeover = client_notakeover
self._reset_heartbeat()
def _cancel_heartbeat(self) -> None:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = None
if self._heartbeat_cb is not None:
self._heartbeat_cb.cancel()
self._heartbeat_cb = None
def _reset_heartbeat(self) -> None:
self._cancel_heartbeat()
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
            # Fire-and-forget is not perfect, but it is acceptable for
            # sending a ping; the alternative would be a long-lived
            # heartbeat task kept on the class.
self._loop.create_task(self._writer.ping())
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
def _pong_not_received(self) -> None:
if not self._closed:
self._closed = True
self._close_code = 1006
self._exception = asyncio.TimeoutError()
self._response.close()
@property
def closed(self) -> bool:
return self._closed
@property
def close_code(self) -> Optional[int]:
return self._close_code
@property
def protocol(self) -> Optional[str]:
return self._protocol
@property
def compress(self) -> int:
return self._compress
@property
def client_notakeover(self) -> bool:
return self._client_notakeover
def get_extra_info(self, name: str, default: Any=None) -> Any:
"""extra info from connection transport"""
conn = self._response.connection
if conn is None:
return default
transport = conn.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes=b'') -> None:
await self._writer.ping(message)
async def pong(self, message: bytes=b'') -> None:
await self._writer.pong(message)
async def send_str(self, data: str,
compress: Optional[int]=None) -> None:
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes,
compress: Optional[int]=None) -> None:
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(self, data: Any,
compress: Optional[int]=None,
*, dumps: JSONEncoder=DEFAULT_JSON_ENCODER) -> None:
await self.send_str(dumps(data), compress=compress)
async def close(self, *, code: int=1000, message: bytes=b'') -> bool:
        # we need to break the `receive()` cycle first,
        # since `close()` may be called from a different task
if self._waiting is not None and not self._closed:
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
await self._waiting
if not self._closed:
self._cancel_heartbeat()
self._closed = True
try:
await self._writer.close(code, message)
except asyncio.CancelledError:
self._close_code = 1006
self._response.close()
raise
except Exception as exc:
self._close_code = 1006
self._exception = exc
self._response.close()
return True
if self._closing:
self._response.close()
return True
while True:
try:
with async_timeout.timeout(self._timeout, loop=self._loop):
msg = await self._reader.read()
except asyncio.CancelledError:
self._close_code = 1006
self._response.close()
raise
except Exception as exc:
self._close_code = 1006
self._exception = exc
self._response.close()
return True
if msg.type == WSMsgType.CLOSE:
self._close_code = msg.data
self._response.close()
return True
else:
return False
async def receive(self, timeout: Optional[float]=None) -> WSMessage:
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
if self._closed:
return WS_CLOSED_MESSAGE
elif self._closing:
await self.close()
return WS_CLOSED_MESSAGE
try:
self._waiting = self._loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout,
loop=self._loop):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
waiter = self._waiting
self._waiting = None
set_result(waiter, True)
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = 1006
raise
except EofStream:
self._close_code = 1000
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except ClientError:
self._closed = True
self._close_code = 1006
return WS_CLOSED_MESSAGE
except WebSocketError as exc:
self._close_code = exc.code
await self.close(code=exc.code)
return WSMessage(WSMsgType.ERROR, exc, None)
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = 1006
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
if msg.type == WSMsgType.CLOSE:
self._closing = True
self._close_code = msg.data
if not self._closed and self._autoclose:
await self.close()
elif msg.type == WSMsgType.CLOSING:
self._closing = True
elif msg.type == WSMsgType.PING and self._autoping:
await self.pong(msg.data)
continue
elif msg.type == WSMsgType.PONG and self._autoping:
continue
return msg
async def receive_str(self, *, timeout: Optional[float]=None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not str".format(msg.type,
msg.data))
return msg.data
async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
return msg.data
async def receive_json(self,
*, loads: JSONDecoder=DEFAULT_JSON_DECODER,
timeout: Optional[float]=None) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
def __aiter__(self) -> 'ClientWebSocketResponse':
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
return msg
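# Illustrative sketch: typical client-side use of the class above through
# ClientSession.ws_connect(). The coroutine and URL are hypothetical; aiohttp
# is imported lazily to avoid a circular import at module load time.
async def _ws_echo_example(url: str) -> None:
    import aiohttp
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url) as ws:
            await ws.send_str('hello')
            async for msg in ws:               # uses __aiter__/__anext__ above
                if msg.type == WSMsgType.TEXT:
                    await ws.close()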

File diff suppressed because it is too large
@ -0,0 +1,357 @@
import asyncio
import datetime
import os # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie # noqa
from math import ceil
from typing import ( # noqa
DefaultDict,
Dict,
Iterable,
Iterator,
Mapping,
Optional,
Set,
Tuple,
Union,
cast,
)
from yarl import URL
from .abc import AbstractCookieJar
from .helpers import is_ip_address
from .typedefs import LooseCookies, PathLike
__all__ = ('CookieJar', 'DummyCookieJar')
CookieItem = Union[str, 'Morsel[str]']
class CookieJar(AbstractCookieJar):
"""Implements cookie storage adhering to RFC 6265."""
DATE_TOKENS_RE = re.compile(
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)")
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|"
"(aug)|(sep)|(oct)|(nov)|(dec)", re.I)
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
MAX_TIME = 2051215261.0 # so far in future (2035-01-01)
def __init__(self, *, unsafe: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
super().__init__(loop=loop)
self._cookies = defaultdict(SimpleCookie) #type: DefaultDict[str, SimpleCookie] # noqa
self._host_only_cookies = set() # type: Set[Tuple[str, str]]
self._unsafe = unsafe
self._next_expiration = ceil(self._loop.time())
self._expirations = {} # type: Dict[Tuple[str, str], int]
def save(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='wb') as f:
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
def load(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='rb') as f:
self._cookies = pickle.load(f)
def clear(self) -> None:
self._cookies.clear()
self._host_only_cookies.clear()
self._next_expiration = ceil(self._loop.time())
self._expirations.clear()
def __iter__(self) -> 'Iterator[Morsel[str]]':
self._do_expiration()
for val in self._cookies.values():
yield from val.values()
def __len__(self) -> int:
return sum(1 for i in self)
def _do_expiration(self) -> None:
now = self._loop.time()
if self._next_expiration > now:
return
if not self._expirations:
return
next_expiration = self.MAX_TIME
to_del = []
cookies = self._cookies
expirations = self._expirations
for (domain, name), when in expirations.items():
if when <= now:
cookies[domain].pop(name, None)
to_del.append((domain, name))
self._host_only_cookies.discard((domain, name))
else:
next_expiration = min(next_expiration, when)
for key in to_del:
del expirations[key]
self._next_expiration = ceil(next_expiration)
def _expire_cookie(self, when: float, domain: str, name: str) -> None:
iwhen = int(when)
self._next_expiration = min(self._next_expiration, iwhen)
self._expirations[(domain, name)] = iwhen
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
"""Update cookies."""
hostname = response_url.raw_host
if not self._unsafe and is_ip_address(hostname):
# Don't accept cookies from IPs
return
if isinstance(cookies, Mapping):
cookies = cookies.items() # type: ignore
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp = SimpleCookie()
tmp[name] = cookie # type: ignore
cookie = tmp[name]
domain = cookie["domain"]
# ignore domains with trailing dots
if domain.endswith('.'):
domain = ""
del cookie["domain"]
if not domain and hostname is not None:
# Set the cookie's domain to the response hostname
# and set its host-only-flag
self._host_only_cookies.add((hostname, name))
domain = cookie["domain"] = hostname
if domain.startswith("."):
# Remove leading dot
domain = domain[1:]
cookie["domain"] = domain
if hostname and not self._is_domain_match(domain, hostname):
# Setting cookies for different domains is not allowed
continue
path = cookie["path"]
if not path or not path.startswith("/"):
# Set the cookie's path to the response path
path = response_url.path
if not path.startswith("/"):
path = "/"
else:
# Cut everything from the last slash to the end
path = "/" + path[1:path.rfind("/")]
cookie["path"] = path
max_age = cookie["max-age"]
if max_age:
try:
delta_seconds = int(max_age)
self._expire_cookie(self._loop.time() + delta_seconds,
domain, name)
except ValueError:
cookie["max-age"] = ""
else:
expires = cookie["expires"]
if expires:
expire_time = self._parse_date(expires)
if expire_time:
self._expire_cookie(expire_time.timestamp(),
domain, name)
else:
cookie["expires"] = ""
self._cookies[domain][name] = cookie
self._do_expiration()
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]':
"""Returns this jar's cookies filtered by their attributes."""
self._do_expiration()
request_url = URL(request_url)
filtered = SimpleCookie()
hostname = request_url.raw_host or ""
is_not_secure = request_url.scheme not in ("https", "wss")
for cookie in self:
name = cookie.key
domain = cookie["domain"]
# Send shared cookies
if not domain:
filtered[name] = cookie.value
continue
if not self._unsafe and is_ip_address(hostname):
continue
if (domain, name) in self._host_only_cookies:
if domain != hostname:
continue
elif not self._is_domain_match(domain, hostname):
continue
if not self._is_path_match(request_url.path, cookie["path"]):
continue
if is_not_secure and cookie["secure"]:
continue
# It's critical we use the Morsel so the coded_value
# (based on cookie version) is preserved
mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel()))
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
filtered[name] = mrsl_val
return filtered
@staticmethod
def _is_domain_match(domain: str, hostname: str) -> bool:
"""Implements domain matching adhering to RFC 6265."""
if hostname == domain:
return True
if not hostname.endswith(domain):
return False
non_matching = hostname[:-len(domain)]
if not non_matching.endswith("."):
return False
return not is_ip_address(hostname)
@staticmethod
def _is_path_match(req_path: str, cookie_path: str) -> bool:
"""Implements path matching adhering to RFC 6265."""
if not req_path.startswith("/"):
req_path = "/"
if req_path == cookie_path:
return True
if not req_path.startswith(cookie_path):
return False
if cookie_path.endswith("/"):
return True
non_matching = req_path[len(cookie_path):]
return non_matching.startswith("/")
@classmethod
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
"""Implements date string parsing adhering to RFC 6265."""
if not date_str:
return None
found_time = False
found_day = False
found_month = False
found_year = False
hour = minute = second = 0
day = 0
month = 0
year = 0
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
token = token_match.group("token")
if not found_time:
time_match = cls.DATE_HMS_TIME_RE.match(token)
if time_match:
found_time = True
hour, minute, second = [
int(s) for s in time_match.groups()]
continue
if not found_day:
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
if day_match:
found_day = True
day = int(day_match.group())
continue
if not found_month:
month_match = cls.DATE_MONTH_RE.match(token)
if month_match:
found_month = True
month = month_match.lastindex
continue
if not found_year:
year_match = cls.DATE_YEAR_RE.match(token)
if year_match:
found_year = True
year = int(year_match.group())
if 70 <= year <= 99:
year += 1900
elif 0 <= year <= 69:
year += 2000
if False in (found_day, found_month, found_year, found_time):
return None
if not 1 <= day <= 31:
return None
if year < 1601 or hour > 23 or minute > 59 or second > 59:
return None
return datetime.datetime(year, month, day,
hour, minute, second,
tzinfo=datetime.timezone.utc)
class DummyCookieJar(AbstractCookieJar):
"""Implements a dummy cookie storage.
It can be used with the ClientSession when no cookie processing is needed.
"""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
super().__init__(loop=loop)
def __iter__(self) -> 'Iterator[Morsel[str]]':
while False:
yield None
def __len__(self) -> int:
return 0
def clear(self) -> None:
pass
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
pass
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
return SimpleCookie()
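# Illustrative sketch: persisting cookies with CookieJar, or disabling cookie
# handling entirely by passing DummyCookieJar to ClientSession. The helper,
# cookie value and URL are hypothetical.
def _cookie_jar_example(file_path: PathLike) -> CookieJar:
    jar = CookieJar(unsafe=True)    # also accept cookies for IP addresses
    jar.update_cookies({'token': 'abc'}, URL('http://example.com'))
    jar.save(file_path)             # pickles the internal SimpleCookie store
    return jar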

@ -0,0 +1,150 @@
import io
from typing import Any, Iterable, List, Optional # noqa
from urllib.parse import urlencode
from multidict import MultiDict, MultiDictProxy
from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload
__all__ = ('FormData',)
class FormData:
"""Helper class for multipart/form-data and
application/x-www-form-urlencoded body generation."""
def __init__(self, fields:
Iterable[Any]=(),
quote_fields: bool=True,
charset: Optional[str]=None) -> None:
self._writer = multipart.MultipartWriter('form-data')
self._fields = [] # type: List[Any]
self._is_multipart = False
self._quote_fields = quote_fields
self._charset = charset
if isinstance(fields, dict):
fields = list(fields.items())
elif not isinstance(fields, (list, tuple)):
fields = (fields,)
self.add_fields(*fields)
@property
def is_multipart(self) -> bool:
return self._is_multipart
def add_field(self, name: str, value: Any, *,
content_type: Optional[str]=None,
filename: Optional[str]=None,
content_transfer_encoding: Optional[str]=None) -> None:
if isinstance(value, io.IOBase):
self._is_multipart = True
elif isinstance(value, (bytes, bytearray, memoryview)):
if filename is None and content_transfer_encoding is None:
filename = name
type_options = MultiDict({'name': name})
if filename is not None and not isinstance(filename, str):
raise TypeError('filename must be an instance of str. '
'Got: %s' % filename)
if filename is None and isinstance(value, io.IOBase):
filename = guess_filename(value, name)
if filename is not None:
type_options['filename'] = filename
self._is_multipart = True
headers = {}
if content_type is not None:
if not isinstance(content_type, str):
raise TypeError('content_type must be an instance of str. '
'Got: %s' % content_type)
headers[hdrs.CONTENT_TYPE] = content_type
self._is_multipart = True
if content_transfer_encoding is not None:
if not isinstance(content_transfer_encoding, str):
raise TypeError('content_transfer_encoding must be an instance'
' of str. Got: %s' % content_transfer_encoding)
headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
self._is_multipart = True
self._fields.append((type_options, headers, value))
def add_fields(self, *fields: Any) -> None:
to_add = list(fields)
while to_add:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, 'unknown')
self.add_field(k, rec) # type: ignore
elif isinstance(rec, (MultiDictProxy, MultiDict)):
to_add.extend(rec.items())
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
k, fp = rec
self.add_field(k, fp) # type: ignore
else:
raise TypeError('Only io.IOBase, multidict and (name, file) '
'pairs allowed, use .add_field() for passing '
'more complex parameters, got {!r}'
.format(rec))
def _gen_form_urlencoded(self) -> payload.BytesPayload:
# form data (x-www-form-urlencoded)
data = []
for type_options, _, value in self._fields:
data.append((type_options['name'], value))
charset = self._charset if self._charset is not None else 'utf-8'
if charset == 'utf-8':
content_type = 'application/x-www-form-urlencoded'
else:
content_type = ('application/x-www-form-urlencoded; '
'charset=%s' % charset)
return payload.BytesPayload(
urlencode(data, doseq=True, encoding=charset).encode(),
content_type=content_type)
def _gen_form_data(self) -> multipart.MultipartWriter:
"""Encode a list of fields using the multipart/form-data MIME format"""
for dispparams, headers, value in self._fields:
try:
if hdrs.CONTENT_TYPE in headers:
part = payload.get_payload(
value, content_type=headers[hdrs.CONTENT_TYPE],
headers=headers, encoding=self._charset)
else:
part = payload.get_payload(
value, headers=headers, encoding=self._charset)
except Exception as exc:
raise TypeError(
'Can not serialize value type: %r\n '
'headers: %r\n value: %r' % (
type(value), headers, value)) from exc
if dispparams:
part.set_content_disposition(
'form-data', quote_fields=self._quote_fields, **dispparams
)
# FIXME cgi.FieldStorage doesn't likes body parts with
# Content-Length which were sent via chunked transfer encoding
assert part.headers is not None
part.headers.popall(hdrs.CONTENT_LENGTH, None)
self._writer.append_payload(part)
return self._writer
def __call__(self) -> Payload:
if self._is_multipart:
return self._gen_form_data()
else:
return self._gen_form_urlencoded()
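# Illustrative usage sketch (not part of the vendored file): plain string fields keep the
# x-www-form-urlencoded encoding, while adding a file-like object switches the form to
# multipart/form-data. 'report.txt' is a hypothetical file name.
def _example_form_data() -> FormData:
    form = FormData()
    form.add_field('user', 'romain')
    assert not form.is_multipart
    form.add_field('attachment', io.BytesIO(b'hello'),
                   filename='report.txt', content_type='text/plain')
    assert form.is_multipart
    return form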

View file

@ -0,0 +1,72 @@
from collections.abc import MutableSequence
from functools import total_ordering
from .helpers import NO_EXTENSIONS
@total_ordering
class FrozenList(MutableSequence):
__slots__ = ('_frozen', '_items')
def __init__(self, items=None):
self._frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
@property
def frozen(self):
return self._frozen
def freeze(self):
self._frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items[index] = value
def __delitem__(self, index):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
del self._items[index]
def __len__(self):
return self._items.__len__()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __eq__(self, other):
return list(self) == other
def __le__(self, other):
return list(self) <= other
def insert(self, pos, item):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items.insert(pos, item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self._frozen,
self._items)
PyFrozenList = FrozenList
try:
from aiohttp._frozenlist import FrozenList as CFrozenList # type: ignore
if not NO_EXTENSIONS:
FrozenList = CFrozenList # type: ignore
except ImportError: # pragma: no cover
pass
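# Illustrative usage sketch (not part of the vendored file): a FrozenList behaves like a
# normal mutable sequence until freeze() is called, after which any mutation raises.
def _example_frozen_list() -> None:
    lst = FrozenList([1, 2])
    lst.append(3)        # MutableSequence provides append() on top of insert()
    lst.freeze()
    try:
        lst.append(4)
    except RuntimeError:
        pass             # expected: the list can no longer be modified
    assert list(lst) == [1, 2, 3]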

View file

@ -0,0 +1,54 @@
from typing import (Generic, Iterable, Iterator, List, MutableSequence,
Optional, TypeVar, Union, overload)
_T = TypeVar('_T')
_Arg = Union[List[_T], Iterable[_T]]
class FrozenList(MutableSequence[_T], Generic[_T]):
def __init__(self, items: Optional[_Arg[_T]]=None) -> None: ...
@property
def frozen(self) -> bool: ...
def freeze(self) -> None: ...
@overload
def __getitem__(self, i: int) -> _T: ...
@overload
def __getitem__(self, s: slice) -> FrozenList[_T]: ...
@overload
def __setitem__(self, i: int, o: _T) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
@overload
def __delitem__(self, i: int) -> None: ...
@overload
def __delitem__(self, i: slice) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __reversed__(self) -> Iterator[_T]: ...
def __eq__(self, other: object) -> bool: ...
def __le__(self, other: FrozenList[_T]) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: FrozenList[_T]) -> bool: ...
def __ge__(self, other: FrozenList[_T]) -> bool: ...
def __gt__(self, other: FrozenList[_T]) -> bool: ...
def insert(self, pos: int, item: _T) -> None: ...
def __repr__(self) -> str: ...
# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList

View file

@ -0,0 +1,100 @@
"""HTTP Headers constants."""
# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
from multidict import istr
METH_ANY = '*'
METH_CONNECT = 'CONNECT'
METH_HEAD = 'HEAD'
METH_GET = 'GET'
METH_DELETE = 'DELETE'
METH_OPTIONS = 'OPTIONS'
METH_PATCH = 'PATCH'
METH_POST = 'POST'
METH_PUT = 'PUT'
METH_TRACE = 'TRACE'
METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE,
METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE}
ACCEPT = istr('Accept')
ACCEPT_CHARSET = istr('Accept-Charset')
ACCEPT_ENCODING = istr('Accept-Encoding')
ACCEPT_LANGUAGE = istr('Accept-Language')
ACCEPT_RANGES = istr('Accept-Ranges')
ACCESS_CONTROL_MAX_AGE = istr('Access-Control-Max-Age')
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('Access-Control-Allow-Credentials')
ACCESS_CONTROL_ALLOW_HEADERS = istr('Access-Control-Allow-Headers')
ACCESS_CONTROL_ALLOW_METHODS = istr('Access-Control-Allow-Methods')
ACCESS_CONTROL_ALLOW_ORIGIN = istr('Access-Control-Allow-Origin')
ACCESS_CONTROL_EXPOSE_HEADERS = istr('Access-Control-Expose-Headers')
ACCESS_CONTROL_REQUEST_HEADERS = istr('Access-Control-Request-Headers')
ACCESS_CONTROL_REQUEST_METHOD = istr('Access-Control-Request-Method')
AGE = istr('Age')
ALLOW = istr('Allow')
AUTHORIZATION = istr('Authorization')
CACHE_CONTROL = istr('Cache-Control')
CONNECTION = istr('Connection')
CONTENT_DISPOSITION = istr('Content-Disposition')
CONTENT_ENCODING = istr('Content-Encoding')
CONTENT_LANGUAGE = istr('Content-Language')
CONTENT_LENGTH = istr('Content-Length')
CONTENT_LOCATION = istr('Content-Location')
CONTENT_MD5 = istr('Content-MD5')
CONTENT_RANGE = istr('Content-Range')
CONTENT_TRANSFER_ENCODING = istr('Content-Transfer-Encoding')
CONTENT_TYPE = istr('Content-Type')
COOKIE = istr('Cookie')
DATE = istr('Date')
DESTINATION = istr('Destination')
DIGEST = istr('Digest')
ETAG = istr('Etag')
EXPECT = istr('Expect')
EXPIRES = istr('Expires')
FORWARDED = istr('Forwarded')
FROM = istr('From')
HOST = istr('Host')
IF_MATCH = istr('If-Match')
IF_MODIFIED_SINCE = istr('If-Modified-Since')
IF_NONE_MATCH = istr('If-None-Match')
IF_RANGE = istr('If-Range')
IF_UNMODIFIED_SINCE = istr('If-Unmodified-Since')
KEEP_ALIVE = istr('Keep-Alive')
LAST_EVENT_ID = istr('Last-Event-ID')
LAST_MODIFIED = istr('Last-Modified')
LINK = istr('Link')
LOCATION = istr('Location')
MAX_FORWARDS = istr('Max-Forwards')
ORIGIN = istr('Origin')
PRAGMA = istr('Pragma')
PROXY_AUTHENTICATE = istr('Proxy-Authenticate')
PROXY_AUTHORIZATION = istr('Proxy-Authorization')
RANGE = istr('Range')
REFERER = istr('Referer')
RETRY_AFTER = istr('Retry-After')
SEC_WEBSOCKET_ACCEPT = istr('Sec-WebSocket-Accept')
SEC_WEBSOCKET_VERSION = istr('Sec-WebSocket-Version')
SEC_WEBSOCKET_PROTOCOL = istr('Sec-WebSocket-Protocol')
SEC_WEBSOCKET_EXTENSIONS = istr('Sec-WebSocket-Extensions')
SEC_WEBSOCKET_KEY = istr('Sec-WebSocket-Key')
SEC_WEBSOCKET_KEY1 = istr('Sec-WebSocket-Key1')
SERVER = istr('Server')
SET_COOKIE = istr('Set-Cookie')
TE = istr('TE')
TRAILER = istr('Trailer')
TRANSFER_ENCODING = istr('Transfer-Encoding')
UPGRADE = istr('Upgrade')
WEBSOCKET = istr('WebSocket')
URI = istr('URI')
USER_AGENT = istr('User-Agent')
VARY = istr('Vary')
VIA = istr('Via')
WANT_DIGEST = istr('Want-Digest')
WARNING = istr('Warning')
WWW_AUTHENTICATE = istr('WWW-Authenticate')
X_FORWARDED_FOR = istr('X-Forwarded-For')
X_FORWARDED_HOST = istr('X-Forwarded-Host')
X_FORWARDED_PROTO = istr('X-Forwarded-Proto')
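# Illustrative usage sketch (not part of the vendored file): the istr constants above are
# intended as keys of CIMultiDict, whose lookups are case-insensitive.
def _example_header_lookup() -> str:
    from multidict import CIMultiDict  # multidict is already a dependency of this module
    headers = CIMultiDict({CONTENT_TYPE: 'text/html; charset=utf-8'})
    assert headers.get('content-type') == headers[CONTENT_TYPE]
    return headers[CONTENT_TYPE]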

View file

@ -0,0 +1,704 @@
"""Various helper functions"""
import asyncio
import base64
import binascii
import cgi
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import ( # noqa
Any,
Callable,
Dict,
Iterable,
Iterator,
List,
Mapping,
Optional,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from urllib.parse import quote
from urllib.request import getproxies
import async_timeout
import attr
from multidict import MultiDict, MultiDictProxy
from yarl import URL
from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike # noqa
__all__ = ('BasicAuth', 'ChainMapProxy')
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
if not PY_37:
import idna_ssl
idna_ssl.patch_match_hostname()
try:
from typing import ContextManager
except ImportError:
from typing_extensions import ContextManager
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None
) -> Set['asyncio.Task[Any]']:
tasks = list(asyncio.Task.all_tasks(loop)) # type: ignore
return {t for t in tasks if not t.done()}
if PY_37:
all_tasks = getattr(asyncio, 'all_tasks') # noqa
_T = TypeVar('_T')
sentinel = object() # type: Any
NO_EXTENSIONS = bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')) # type: bool
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = (getattr(sys.flags, 'dev_mode', False) or
(not sys.flags.ignore_environment and
bool(os.environ.get('PYTHONASYNCIODEBUG')))) # type: bool
CHAR = set(chr(i) for i in range(0, 128))
CTL = set(chr(i) for i in range(0, 32)) | {chr(127), }
SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']',
'?', '=', '{', '}', ' ', chr(9)}
TOKEN = CHAR ^ CTL ^ SEPARATORS
coroutines = asyncio.coroutines
old_debug = coroutines._DEBUG # type: ignore
# prevent "coroutine noop was never awaited" warning.
coroutines._DEBUG = False # type: ignore
@asyncio.coroutine
def noop(*args, **kwargs): # type: ignore
return # type: ignore
async def noop2(*args: Any, **kwargs: Any) -> None:
return
coroutines._DEBUG = old_debug # type: ignore
class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])):
"""Http basic authentication helper."""
def __new__(cls, login: str,
password: str='',
encoding: str='latin1') -> 'BasicAuth':
if login is None:
raise ValueError('None is not allowed as login value')
if password is None:
raise ValueError('None is not allowed as password value')
if ':' in login:
raise ValueError(
'A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth':
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(' ', 1)
except ValueError:
raise ValueError('Could not parse authorization header.')
if auth_type.lower() != 'basic':
raise ValueError('Unknown authorization method %s' % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode('ascii'), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError('Invalid base64 encoding.')
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(':', 1)
except ValueError:
raise ValueError('Invalid credentials.')
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL,
*, encoding: str='latin1') -> Optional['BasicAuth']:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
if url.user is None:
return None
return cls(url.user, url.password or '', encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding)
return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
auth = BasicAuth.from_url(url)
if auth is None:
return url, None
else:
return url.with_user(None), auth
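# Illustrative usage sketch (not part of the vendored file): BasicAuth round-trips through
# the Authorization header value, and strip_auth_from_url() splits credentials off a URL.
# The credentials and host below are placeholders.
def _example_basic_auth() -> None:
    auth = BasicAuth('romain', 's3cret')
    assert BasicAuth.decode(auth.encode()) == auth
    url, url_auth = strip_auth_from_url(URL('http://user:pw@example.org/path'))
    assert str(url) == 'http://example.org/path'
    assert url_auth == BasicAuth('user', 'pw')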
def netrc_from_env() -> Optional[netrc.netrc]:
"""Attempt to load the netrc file from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get('NETRC')
if netrc_env is not None:
netrc_path = Path(netrc_env)
else:
try:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug('Could not resolve home directory when '
'trying to look for .netrc file: %s', e)
return None
netrc_path = home_dir / (
'_netrc' if platform.system() == 'Windows' else '.netrc')
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning('Could not parse .netrc file: %s', e)
except OSError as e:
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_path.is_file():
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning('Could not read .netrc file: %s', e)
return None
@attr.s(frozen=True, slots=True)
class ProxyInfo:
proxy = attr.ib(type=URL)
proxy_auth = attr.ib(type=Optional[BasicAuth])
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {k: URL(v) for k, v in getproxies().items()
if k in ('http', 'https')}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme == 'https':
client_logger.warning(
"HTTPS proxies %s are not supported, ignoring", proxy)
continue
if netrc_obj and auth is None:
auth_from_netrc = None
if proxy.host is not None:
auth_from_netrc = netrc_obj.authenticators(proxy.host)
if auth_from_netrc is not None:
# auth_from_netrc is a (`user`, `account`, `password`) tuple,
# `user` and `account` both can be username,
# if `user` is None, use `account`
*logins, password = auth_from_netrc
login = logins[0] if logins[0] else logins[-1]
auth = BasicAuth(cast(str, login), cast(str, password))
ret[proto] = ProxyInfo(proxy, auth)
return ret
def current_task(loop: Optional[asyncio.AbstractEventLoop]=None) -> asyncio.Task: # type: ignore # noqa # Return type is intentionally Generic here
if PY_37:
return asyncio.current_task(loop=loop) # type: ignore
else:
return asyncio.Task.current_task(loop=loop) # type: ignore
def get_running_loop(
loop: Optional[asyncio.AbstractEventLoop]=None
) -> asyncio.AbstractEventLoop:
if loop is None:
loop = asyncio.get_event_loop()
if not loop.is_running():
warnings.warn("The object should be created from async function",
DeprecationWarning, stacklevel=3)
if loop.get_debug():
internal_logger.warning(
"The object should be created from async function",
stack_info=True)
return loop
def isasyncgenfunction(obj: Any) -> bool:
func = getattr(inspect, 'isasyncgenfunction', None)
if func is not None:
return func(obj)
else:
return False
@attr.s(frozen=True, slots=True)
class MimeType:
type = attr.ib(type=str)
subtype = attr.ib(type=str)
suffix = attr.ib(type=str)
parameters = attr.ib(type=MultiDictProxy) # type: MultiDictProxy[str]
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
"""Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'})
"""
if not mimetype:
return MimeType(type='', subtype='', suffix='',
parameters=MultiDictProxy(MultiDict()))
parts = mimetype.split(';')
params = MultiDict() # type: MultiDict[str]
for item in parts[1:]:
if not item:
continue
key, value = cast(Tuple[str, str],
item.split('=', 1) if '=' in item else (item, ''))
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == '*':
fulltype = '*/*'
mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1))
if '/' in fulltype else (fulltype, ''))
stype, suffix = (cast(Tuple[str, str], stype.split('+', 1))
if '+' in stype else (stype, ''))
return MimeType(type=mtype, subtype=stype, suffix=suffix,
parameters=MultiDictProxy(params))
def guess_filename(obj: Any, default: Optional[str]=None) -> Optional[str]:
name = getattr(obj, 'name', None)
if name and isinstance(name, str) and name[0] != '<' and name[-1] != '>':
return Path(name).name
return default
def content_disposition_header(disptype: str,
quote_fields: bool=True,
**params: str) -> str:
"""Sets ``Content-Disposition`` header.
disptype is a disposition type: inline, attachment, form-data.
Should be valid extension token (see RFC 2183)
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError('bad content disposition type {!r}'
''.format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError('bad content disposition parameter'
' {!r}={!r}'.format(key, val))
qval = quote(val, '') if quote_fields else val
lparams.append((key, '"%s"' % qval))
if key == 'filename':
lparams.append(('filename*', "utf-8''" + qval))
sparams = '; '.join('='.join(pair) for pair in lparams)
value = '; '.join((value, sparams))
return value
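# Illustrative usage sketch (not part of the vendored file): the helper quotes parameters
# and duplicates the filename as an RFC 5987 filename* parameter.
def _example_content_disposition() -> str:
    value = content_disposition_header('attachment', filename='report.txt')
    assert value == 'attachment; filename="report.txt"; filename*=utf-8\'\'report.txt'
    return value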
class reify:
"""Use as a class method decorator. It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
def __init__(self, wrapped: Callable[..., Any]) -> None:
self.wrapped = wrapped
self.__doc__ = wrapped.__doc__
self.name = wrapped.__name__
def __get__(self, inst: Any, owner: Any) -> Any:
try:
try:
return inst._cache[self.name]
except KeyError:
val = self.wrapped(inst)
inst._cache[self.name] = val
return val
except AttributeError:
if inst is None:
return self
raise
def __set__(self, inst: Any, value: Any) -> None:
raise AttributeError("reified property is read-only")
reify_py = reify
try:
from ._helpers import reify as reify_c
if not NO_EXTENSIONS:
reify = reify_c # type: ignore
except ImportError:
pass
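# Illustrative usage sketch (not part of the vendored file): reify expects the instance to
# carry a ``_cache`` dict (as aiohttp's request objects do); the wrapped method runs once.
class _ExampleReified:
    def __init__(self) -> None:
        self._cache = {}  # type: Dict[str, Any]
        self.calls = 0
    @reify
    def answer(self) -> int:
        self.calls += 1
        return 42
def _example_reify() -> None:
    obj = _ExampleReified()
    assert obj.answer == 42 and obj.answer == 42
    assert obj.calls == 1  # the wrapped method body ran only once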
_ipv4_pattern = (r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')
_ipv6_pattern = (
r'^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}'
r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)'
r'((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})'
r'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}'
r'[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)'
r'(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}'
r':|:(:[A-F0-9]{1,4}){7})$')
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii'))
_ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE)
def _is_ip_address(
regex: Pattern[str], regexb: Pattern[bytes],
host: Optional[Union[str, bytes]])-> bool:
if host is None:
return False
if isinstance(host, str):
return bool(regex.match(host))
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
raise TypeError("{} [{}] is not a str or bytes"
.format(host, type(host)))
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
def is_ip_address(
host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
return is_ipv4_address(host) or is_ipv6_address(host)
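# Illustrative usage sketch (not part of the vendored file): both str and bytes hosts are
# accepted, and anything that is not a literal IP address comes back False.
def _example_is_ip_address() -> None:
    assert is_ipv4_address('127.0.0.1')
    assert is_ipv6_address(b'::1')
    assert is_ip_address('2001:db8::1')
    assert not is_ip_address('example.org')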
_cached_current_datetime = None
_cached_formatted_datetime = None
def rfc822_formatted_time() -> str:
global _cached_current_datetime
global _cached_formatted_datetime
now = int(time.time())
if now != _cached_current_datetime:
# Weekday and month names for HTTP date/time formatting;
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = ("", # Dummy so we can use 1-based month numbers
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now) # type: ignore # noqa
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd], day, _monthname[month], year, hh, mm, ss
)
_cached_current_datetime = now
return _cached_formatted_datetime # type: ignore
def _weakref_handle(info): # type: ignore
ref, name = info
ob = ref()
if ob is not None:
with suppress(Exception):
getattr(ob, name)()
def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): # type: ignore
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if ceil_timeout:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
def call_later(cb, timeout, loop): # type: ignore
if timeout is not None and timeout > 0:
when = ceil(loop.time() + timeout)
return loop.call_at(when, cb)
class TimeoutHandle:
""" Timeout handle """
def __init__(self,
loop: asyncio.AbstractEventLoop,
timeout: Optional[float]) -> None:
self._timeout = timeout
self._loop = loop
self._callbacks = [] # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] # noqa
def register(self, callback: Callable[..., None],
*args: Any, **kwargs: Any) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> Optional[asyncio.Handle]:
if self._timeout is not None and self._timeout > 0:
at = ceil(self._loop.time() + self._timeout)
return self._loop.call_at(at, self.__call__)
else:
return None
def timer(self) -> 'BaseTimerContext':
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
return timer
else:
return TimerNoop()
def __call__(self) -> None:
for cb, args, kwargs in self._callbacks:
with suppress(Exception):
cb(*args, **kwargs)
self._callbacks.clear()
class BaseTimerContext(ContextManager['BaseTimerContext']):
pass
class TimerNoop(BaseTimerContext):
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return False
class TimerContext(BaseTimerContext):
""" Low resolution timeout context manager """
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._tasks = [] # type: List[asyncio.Task[Any]]
self._cancelled = False
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
raise RuntimeError('Timeout context manager should be used '
'inside a task')
if self._cancelled:
task.cancel()
raise asyncio.TimeoutError from None
self._tasks.append(task)
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
if self._tasks:
self._tasks.pop()
if exc_type is asyncio.CancelledError and self._cancelled:
raise asyncio.TimeoutError from None
return None
def timeout(self) -> None:
if not self._cancelled:
for task in set(self._tasks):
task.cancel()
self._cancelled = True
class CeilTimeout(async_timeout.timeout):
def __enter__(self) -> async_timeout.timeout:
if self._timeout is not None:
self._task = current_task(loop=self._loop)
if self._task is None:
raise RuntimeError(
'Timeout context manager should be used inside a task')
self._cancel_handler = self._loop.call_at(
ceil(self._loop.time() + self._timeout), self._cancel_task)
return self
class HeadersMixin:
ATTRS = frozenset([
'_content_type', '_content_dict', '_stored_content_type'])
_content_type = None # type: Optional[str]
_content_dict = None # type: Optional[Dict[str, str]]
_stored_content_type = sentinel
def _parse_content_type(self, raw: str) -> None:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = 'application/octet-stream'
self._content_dict = {}
else:
self._content_type, self._content_dict = cgi.parse_header(raw)
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore
@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get('charset') # type: ignore
@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore
if content_length is not None:
return int(content_length)
else:
return None
def set_result(fut: 'asyncio.Future[_T]', result: _T) -> None:
if not fut.done():
fut.set_result(result)
def set_exception(fut: 'asyncio.Future[_T]', exc: BaseException) -> None:
if not fut.done():
fut.set_exception(exc)
class ChainMapProxy(Mapping[str, Any]):
__slots__ = ('_maps',)
def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError("Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__))
def __getitem__(self, key: str) -> Any:
for mapping in self._maps:
try:
return mapping[key]
except KeyError:
pass
raise KeyError(key)
def get(self, key: str, default: Any=None) -> Any:
return self[key] if key in self else default
def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore
def __iter__(self) -> Iterator[str]:
d = {} # type: Dict[str, Any]
for mapping in reversed(self._maps):
# reuses stored hash values if possible
d.update(mapping)
return iter(d)
def __contains__(self, key: object) -> bool:
return any(key in m for m in self._maps)
def __bool__(self) -> bool:
return any(self._maps)
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return 'ChainMapProxy({})'.format(content)
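# Illustrative usage sketch (not part of the vendored file): earlier mappings shadow later
# ones, and the proxy itself is read-only; the keys below are placeholders.
def _example_chain_map_proxy() -> None:
    overrides = {'debug': True}
    defaults = {'debug': False, 'timeout': 30}
    cfg = ChainMapProxy([overrides, defaults])
    assert cfg['debug'] is True        # first mapping wins
    assert cfg.get('timeout') == 30    # falls through to the defaults
    assert len(cfg) == 2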

View file

@ -0,0 +1,56 @@
import http.server
import sys
from typing import Mapping, Tuple # noqa
from . import __version__
from .http_exceptions import HttpProcessingError
from .http_parser import (
HeadersParser,
HttpParser,
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
RawResponseMessage,
)
from .http_websocket import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
WS_KEY,
WebSocketError,
WebSocketReader,
WebSocketWriter,
WSCloseCode,
WSMessage,
WSMsgType,
ws_ext_gen,
ws_ext_parse,
)
from .http_writer import (
HttpVersion,
HttpVersion10,
HttpVersion11,
StreamWriter,
)
__all__ = (
'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE',
# .http_writer
'StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11',
# .http_parser
'HeadersParser', 'HttpParser',
'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage',
# .http_websocket
'WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'ws_ext_gen', 'ws_ext_parse',
'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode',
)
SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
sys.version_info, __version__) # type: str
RESPONSES = http.server.BaseHTTPRequestHandler.responses # type: Mapping[int, Tuple[str, str]] # noqa
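# Illustrative usage sketch (not part of the vendored file): RESPONSES maps status codes to
# (short, long) reason phrases taken from the standard library, at least on CPython.
def _example_responses() -> str:
    assert SERVER_SOFTWARE.startswith('Python/')
    return RESPONSES[404][0]  # 'Not Found'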

View file

@ -0,0 +1,98 @@
"""Low-level http related exceptions."""
from typing import Optional, Union
from .typedefs import _CIMultiDict
__all__ = ('HttpProcessingError',)
class HttpProcessingError(Exception):
"""HTTP error.
Shortcut for raising HTTP errors with custom code, message and headers.
code: HTTP Error code.
message: (optional) Error message.
headers: (optional) Headers to be sent in response, a list of pairs
"""
code = 0
message = ''
headers = None
def __init__(self, *,
code: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
if code is not None:
self.code = code
self.headers = headers
self.message = message
super().__init__("%s, message='%s'" % (self.code, message))
class BadHttpMessage(HttpProcessingError):
code = 400
message = 'Bad Request'
def __init__(self, message: str, *,
headers: Optional[_CIMultiDict]=None) -> None:
super().__init__(message=message, headers=headers)
class HttpBadRequest(BadHttpMessage):
code = 400
message = 'Bad Request'
class PayloadEncodingError(BadHttpMessage):
"""Base class for payload errors"""
class ContentEncodingError(PayloadEncodingError):
"""Content encoding error."""
class TransferEncodingError(PayloadEncodingError):
"""Transfer encoding error."""
class ContentLengthError(PayloadEncodingError):
"""Not enough data to satisfy the Content-Length header."""
class LineTooLong(BadHttpMessage):
def __init__(self, line: str,
limit: str='Unknown',
actual_size: str='Unknown') -> None:
super().__init__(
"Got more than %s bytes (%s) when reading %s." % (
limit, actual_size, line))
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr: Union[bytes, str]) -> None:
if isinstance(hdr, bytes):
hdr = hdr.decode('utf-8', 'surrogateescape')
super().__init__('Invalid HTTP Header: {}'.format(hdr))
self.hdr = hdr
class BadStatusLine(BadHttpMessage):
def __init__(self, line: str='') -> None:
if not line:
line = repr(line)
self.args = line,
self.line = line
class InvalidURLError(BadHttpMessage):
pass

View file

@ -0,0 +1,764 @@
import abc
import asyncio
import collections
import re
import string
import zlib
from enum import IntEnum
from typing import Any, List, Optional, Tuple, Type, Union # noqa
from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL
from . import hdrs
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
BadStatusLine,
ContentEncodingError,
ContentLengthError,
InvalidHeader,
LineTooLong,
TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders
try:
import brotli
HAS_BROTLI = True
except ImportError: # pragma: no cover
HAS_BROTLI = False
__all__ = (
'HeadersParser', 'HttpParser', 'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
ASCIISET = set(string.printable)
# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r'HTTP/(\d+)\.(\d+)')
HDRRE = re.compile(rb'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]')
RawRequestMessage = collections.namedtuple(
'RawRequestMessage',
['method', 'path', 'version', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked', 'url'])
RawResponseMessage = collections.namedtuple(
'RawResponseMessage',
['version', 'code', 'reason', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked'])
class ParseState(IntEnum):
PARSE_NONE = 0
PARSE_LENGTH = 1
PARSE_CHUNKED = 2
PARSE_UNTIL_EOF = 3
class ChunkState(IntEnum):
PARSE_CHUNKED_SIZE = 0
PARSE_CHUNKED_CHUNK = 1
PARSE_CHUNKED_CHUNK_EOF = 2
PARSE_MAYBE_TRAILERS = 3
PARSE_TRAILERS = 4
class HeadersParser:
def __init__(self,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190) -> None:
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]', RawHeaders]:
headers = CIMultiDict() # type: CIMultiDict[str]
raw_headers = []
lines_idx = 1
line = lines[1]
line_count = len(lines)
while line:
# Parse initial header name : value pair.
try:
bname, bvalue = line.split(b':', 1)
except ValueError:
raise InvalidHeader(line) from None
bname = bname.strip(b' \t')
bvalue = bvalue.lstrip()
if HDRRE.search(bname):
raise InvalidHeader(bname)
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
bname.decode("utf8", "xmlcharrefreplace")),
str(self.max_field_size),
str(len(bname)))
header_length = len(bvalue)
# next line
lines_idx += 1
line = lines[lines_idx]
# consume continuation lines
continuation = line and line[0] in (32, 9) # (' ', '\t')
if continuation:
bvalue_lst = [bvalue]
while continuation:
header_length += len(line)
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
str(self.max_field_size),
str(header_length))
bvalue_lst.append(line)
# next line
lines_idx += 1
if lines_idx < line_count:
line = lines[lines_idx]
if line:
continuation = line[0] in (32, 9) # (' ', '\t')
else:
line = b''
break
bvalue = b''.join(bvalue_lst)
else:
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
str(self.max_field_size),
str(header_length))
bvalue = bvalue.strip()
name = bname.decode('utf-8', 'surrogateescape')
value = bvalue.decode('utf-8', 'surrogateescape')
headers.add(name, value)
raw_headers.append((bname, bvalue))
return (CIMultiDictProxy(headers), tuple(raw_headers))
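# Illustrative usage sketch (not part of the vendored file): parse_headers() expects the
# raw request/status line at index 0 (it is skipped) followed by header lines and a
# terminating empty line. The header values below are placeholders.
def _example_parse_headers() -> None:
    lines = [b'GET / HTTP/1.1',
             b'Host: example.org',
             b'Content-Type: text/plain',
             b'']
    headers, raw_headers = HeadersParser().parse_headers(lines)
    assert headers['Host'] == 'example.org'
    assert raw_headers[0] == (b'Host', b'example.org')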
class HttpParser(abc.ABC):
def __init__(self, protocol: Optional[BaseProtocol]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190,
timer: Optional[BaseTimerContext]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
payload_exception: Optional[Type[BaseException]]=None,
response_with_body: bool=True,
read_until_eof: bool=False,
auto_decompress: bool=True) -> None:
self.protocol = protocol
self.loop = loop
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
self.timer = timer
self.code = code
self.method = method
self.readall = readall
self.payload_exception = payload_exception
self.response_with_body = response_with_body
self.read_until_eof = read_until_eof
self._lines = [] # type: List[bytes]
self._tail = b''
self._upgraded = False
self._payload = None
self._payload_parser = None # type: Optional[HttpPayloadParser]
self._auto_decompress = auto_decompress
self._headers_parser = HeadersParser(max_line_size,
max_headers,
max_field_size)
@abc.abstractmethod
def parse_message(self, lines: List[bytes]) -> Any:
pass
def feed_eof(self) -> Any:
if self._payload_parser is not None:
self._payload_parser.feed_eof()
self._payload_parser = None
else:
# try to extract partial message
if self._tail:
self._lines.append(self._tail)
if self._lines:
if self._lines[-1] != b'\r\n':
self._lines.append(b'')
try:
return self.parse_message(self._lines)
except Exception:
return None
def feed_data(
self,
data: bytes,
SEP: bytes=b'\r\n',
EMPTY: bytes=b'',
CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH,
METH_CONNECT: str=hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr=hdrs.SEC_WEBSOCKET_KEY1
) -> Tuple[List[Any], bool, bytes]:
messages = []
if self._tail:
data, self._tail = self._tail + data, b''
data_len = len(data)
start_pos = 0
loop = self.loop
while start_pos < data_len:
# read HTTP message (request/response line + headers), \r\n\r\n
# and split by lines
if self._payload_parser is None and not self._upgraded:
pos = data.find(SEP, start_pos)
# consume \r\n
if pos == start_pos and not self._lines:
start_pos = pos + 2
continue
if pos >= start_pos:
# line found
self._lines.append(data[start_pos:pos])
start_pos = pos + 2
# \r\n\r\n found
if self._lines[-1] == EMPTY:
try:
msg = self.parse_message(self._lines)
finally:
self._lines.clear()
# payload length
length = msg.headers.get(CONTENT_LENGTH)
if length is not None:
try:
length = int(length)
except ValueError:
raise InvalidHeader(CONTENT_LENGTH)
if length < 0:
raise InvalidHeader(CONTENT_LENGTH)
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
self._upgraded = msg.upgrade
method = getattr(msg, 'method', self.method)
assert self.protocol is not None
# calculate payload
if ((length is not None and length > 0) or
msg.chunked and not msg.upgrade):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
compression=msg.compression,
code=self.code, readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self._upgraded = True
self._payload_parser = HttpPayloadParser(
payload, method=msg.method,
compression=msg.compression, readall=True,
auto_decompress=self._auto_decompress)
else:
if (getattr(msg, 'code', 100) >= 199 and
length is None and self.read_until_eof):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
compression=msg.compression,
code=self.code, readall=True,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
if not payload_parser.done:
self._payload_parser = payload_parser
else:
payload = EMPTY_PAYLOAD # type: ignore
messages.append((msg, payload))
else:
self._tail = data[start_pos:]
data = EMPTY
break
# no parser, just store
elif self._payload_parser is None and self._upgraded:
assert not self._lines
break
# feed payload
elif data and start_pos < data_len:
assert not self._lines
assert self._payload_parser is not None
try:
eof, data = self._payload_parser.feed_data(
data[start_pos:])
except BaseException as exc:
if self.payload_exception is not None:
self._payload_parser.payload.set_exception(
self.payload_exception(str(exc)))
else:
self._payload_parser.payload.set_exception(exc)
eof = True
data = b''
if eof:
start_pos = 0
data_len = len(data)
self._payload_parser = None
continue
else:
break
if data and start_pos < data_len:
data = data[start_pos:]
else:
data = EMPTY
return messages, self._upgraded, data
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]',
RawHeaders,
Optional[bool],
Optional[str],
bool,
bool]:
"""Parses RFC 5322 headers from a stream.
Line continuations are supported. Returns the parsed headers together
with the connection-close, content-encoding, upgrade and chunked flags.
"""
headers, raw_headers = self._headers_parser.parse_headers(lines)
close_conn = None
encoding = None
upgrade = False
chunked = False
# keep-alive
conn = headers.get(hdrs.CONNECTION)
if conn:
v = conn.lower()
if v == 'close':
close_conn = True
elif v == 'keep-alive':
close_conn = False
elif v == 'upgrade':
upgrade = True
# encoding
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
encoding = enc
# chunking
te = headers.get(hdrs.TRANSFER_ENCODING)
if te and 'chunked' in te.lower():
chunked = True
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
class HttpRequestParser(HttpParser):
"""Read request status line. Exception .http_exceptions.BadStatusLine
could be raised in case of any errors in status line.
Returns RawRequestMessage.
"""
def parse_message(self, lines: List[bytes]) -> Any:
# request line
line = lines[0].decode('utf-8', 'surrogateescape')
try:
method, path, version = line.split(None, 2)
except ValueError:
raise BadStatusLine(line) from None
if len(path) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(path)))
# method
if not METHRE.match(method):
raise BadStatusLine(method)
# version
try:
if version.startswith('HTTP/'):
n1, n2 = version[5:].split('.', 1)
version_o = HttpVersion(int(n1), int(n2))
else:
raise BadStatusLine(version)
except Exception:
raise BadStatusLine(version)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
if close is None: # the Connection header was not present in the request
if version_o <= HttpVersion10: # HTTP/1.0 closes the connection by default
close = True
else: # HTTP/1.1 keeps the connection alive by default
close = False
return RawRequestMessage(
method, path, version_o, headers, raw_headers,
close, compression, upgrade, chunked, URL(path))
class HttpResponseParser(HttpParser):
"""Read response status line and headers.
BadStatusLine could be raised in case of any errors in status line.
Returns RawResponseMessage"""
def parse_message(self, lines: List[bytes]) -> Any:
line = lines[0].decode('utf-8', 'surrogateescape')
try:
version, status = line.split(None, 1)
except ValueError:
raise BadStatusLine(line) from None
try:
status, reason = status.split(None, 1)
except ValueError:
reason = ''
if len(reason) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(reason)))
# version
match = VERSRE.match(version)
if match is None:
raise BadStatusLine(line)
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit number
try:
status_i = int(status)
except ValueError:
raise BadStatusLine(line) from None
if status_i > 999:
raise BadStatusLine(line)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
if close is None:
close = version_o <= HttpVersion10
return RawResponseMessage(
version_o, status_i, reason.strip(),
headers, raw_headers, close, compression, upgrade, chunked)
class HttpPayloadParser:
def __init__(self, payload: StreamReader,
length: Optional[int]=None,
chunked: bool=False,
compression: Optional[str]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
response_with_body: bool=True,
auto_decompress: bool=True) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b''
self._auto_decompress = auto_decompress
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
real_payload = DeflateBuffer(payload, compression) # type: Union[StreamReader, DeflateBuffer] # noqa
else:
real_payload = payload
# payload parser
if not response_with_body:
# don't parse payload if it's not expected to be received
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
elif chunked:
self._type = ParseState.PARSE_CHUNKED
elif length is not None:
self._type = ParseState.PARSE_LENGTH
self._length = length
if self._length == 0:
real_payload.feed_eof()
self.done = True
else:
if readall and code != 204:
self._type = ParseState.PARSE_UNTIL_EOF
elif method in ('PUT', 'POST'):
internal_logger.warning( # pragma: no cover
'Content-Length or Transfer-Encoding header is required')
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
self.payload = real_payload
def feed_eof(self) -> None:
if self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_eof()
elif self._type == ParseState.PARSE_LENGTH:
raise ContentLengthError(
"Not enough data to satisfy the Content-Length header.")
elif self._type == ParseState.PARSE_CHUNKED:
raise TransferEncodingError(
"Not enough data to satisfy the transfer length header.")
def feed_data(self,
chunk: bytes,
SEP: bytes=b'\r\n',
CHUNK_EXT: bytes=b';') -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b''
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
self.payload.feed_eof()
return True, chunk[required:]
# Chunked transfer encoding parser
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b''
while chunk:
# read next chunk size
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
pos = chunk.find(SEP)
if pos >= 0:
i = chunk.find(CHUNK_EXT, 0, pos)
if i >= 0:
size_b = chunk[:i] # strip chunk-extensions
else:
size_b = chunk[:pos]
try:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode('ascii', 'surrogateescape'))
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos+2:]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b''
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
required = self._chunk_size
chunk_len = len(chunk)
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b''
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
if chunk[:2] == SEP:
chunk = chunk[2:]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b''
# if the stream does not contain a trailer, after 0\r\n
# we should get another \r\n; otherwise
# trailers need to be skipped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
if chunk[:2] == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
else:
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos+2:]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b''
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b''
class DeflateBuffer:
"""DeflateStream decompress stream and feed data into specified stream."""
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
self.out = out
self.size = 0
self.encoding = encoding
self._started_decoding = False
if encoding == 'br':
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
'Can not decode content-encoding: brotli (br). '
'Please install `brotlipy`')
self.decompressor = brotli.Decompressor()
else:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self.decompressor = zlib.decompressobj(wbits=zlib_mode)
def set_exception(self, exc: BaseException) -> None:
self.out.set_exception(exc)
def feed_data(self, chunk: bytes, size: int) -> None:
self.size += size
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
if not self._started_decoding and self.encoding == 'deflate':
self.decompressor = zlib.decompressobj()
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
else:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
if chunk:
self._started_decoding = True
self.out.feed_data(chunk, len(chunk))
def feed_eof(self) -> None:
chunk = self.decompressor.flush()
if chunk or self.size > 0:
self.out.feed_data(chunk, len(chunk))
if self.encoding == 'deflate' and not self.decompressor.eof:
raise ContentEncodingError('deflate')
self.out.feed_eof()
def begin_http_chunk_receiving(self) -> None:
self.out.begin_http_chunk_receiving()
def end_http_chunk_receiving(self) -> None:
self.out.end_http_chunk_receiving()
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage
try:
if not NO_EXTENSIONS:
from ._http_parser import (HttpRequestParser, # type: ignore # noqa
HttpResponseParser,
RawRequestMessage,
RawResponseMessage)
HttpRequestParserC = HttpRequestParser
HttpResponseParserC = HttpResponseParser
RawRequestMessageC = RawRequestMessage
RawResponseMessageC = RawResponseMessage
except ImportError: # pragma: no cover
pass

View file

@ -0,0 +1,653 @@
"""WebSocket protocol versions 13 and 8."""
import asyncio
import collections
import json
import random
import re
import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import Any, Callable, List, Optional, Tuple, Union
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .log import ws_logger
from .streams import DataQueue
__all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'WSMessage',
'WebSocketError', 'WSMsgType', 'WSCloseCode')
class WSCloseCode(IntEnum):
OK = 1000
GOING_AWAY = 1001
PROTOCOL_ERROR = 1002
UNSUPPORTED_DATA = 1003
INVALID_TEXT = 1007
POLICY_VIOLATION = 1008
MESSAGE_TOO_BIG = 1009
MANDATORY_EXTENSION = 1010
INTERNAL_ERROR = 1011
SERVICE_RESTART = 1012
TRY_AGAIN_LATER = 1013
ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode}
class WSMsgType(IntEnum):
# websocket spec types
CONTINUATION = 0x0
TEXT = 0x1
BINARY = 0x2
PING = 0x9
PONG = 0xa
CLOSE = 0x8
# aiohttp specific types
CLOSING = 0x100
CLOSED = 0x101
ERROR = 0x102
text = TEXT
binary = BINARY
ping = PING
pong = PONG
close = CLOSE
closing = CLOSING
closed = CLOSED
error = ERROR
WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
UNPACK_LEN2 = Struct('!H').unpack_from
UNPACK_LEN3 = Struct('!Q').unpack_from
UNPACK_CLOSE_CODE = Struct('!H').unpack
PACK_LEN1 = Struct('!BB').pack
PACK_LEN2 = Struct('!BBH').pack
PACK_LEN3 = Struct('!BBQ').pack
PACK_CLOSE_CODE = Struct('!H').pack
MSG_SIZE = 2 ** 14
DEFAULT_LIMIT = 2 ** 16
_WSMessageBase = collections.namedtuple('_WSMessageBase',
['type', 'data', 'extra'])
class WSMessage(_WSMessageBase):
def json(self, *, # type: ignore
loads: Callable[[Any], Any]=json.loads) -> None:
"""Return parsed JSON data.
.. versionadded:: 0.22
"""
return loads(self.data)
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
class WebSocketError(Exception):
"""WebSocket protocol parser error."""
def __init__(self, code: int, message: str) -> None:
self.code = code
super().__init__(message)
class WSHandshakeError(Exception):
"""WebSocket protocol handshake error."""
native_byteorder = sys.byteorder
# Used by _websocket_mask_python
_XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)]
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
object of any length. The contents of `data` are masked with `mask`,
as specified in section 5.3 of RFC 6455.
Note that this function mutates the `data` argument.
This pure-python implementation may be replaced by an optimized
version when available.
"""
assert isinstance(data, bytearray), data
assert len(mask) == 4, mask
if data:
a, b, c, d = (_XOR_TABLE[n] for n in mask)
data[::4] = data[::4].translate(a)
data[1::4] = data[1::4].translate(b)
data[2::4] = data[2::4].translate(c)
data[3::4] = data[3::4].translate(d)
if NO_EXTENSIONS: # pragma: no cover
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
_websocket_mask = _websocket_mask_python
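# Illustrative usage sketch (not part of the vendored file): XOR masking is an involution,
# so applying the same 4-byte mask twice restores the payload in place.
def _example_websocket_mask() -> None:
    mask = b'\x01\x02\x03\x04'
    data = bytearray(b'hello websocket')
    _websocket_mask(mask, data)
    assert bytes(data) != b'hello websocket'
    _websocket_mask(mask, data)
    assert bytes(data) == b'hello websocket'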
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff])
_WS_EXT_RE = re.compile(r'^(?:;\s*(?:'
r'(server_no_context_takeover)|'
r'(client_no_context_takeover)|'
r'(server_max_window_bits(?:=(\d+))?)|'
r'(client_max_window_bits(?:=(\d+))?)))*$')
_WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?')
def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
if not extstr:
return 0, False
compress = 0
notakeover = False
for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
defext = ext.group(1)
# Return compress = 15 when we get a bare `permessage-deflate`
if not defext:
compress = 15
break
match = _WS_EXT_RE.match(defext)
if match:
compress = 15
if isserver:
# The server never fails to detect the compress handshake.
# The server does not need to send max window bits to the client.
if match.group(4):
compress = int(match.group(4))
# Group 3 must match if group 4 matches.
# zlib does not support a window size of 8.
# If the compression level is not supported,
# CONTINUE to the next extension
if compress > 15 or compress < 9:
compress = 0
continue
if match.group(1):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
else:
if match.group(6):
compress = int(match.group(6))
# Group 5 must match if group 6 matches.
# zlib does not support a window size of 8.
# If the compression level is not supported,
# FAIL the parsing process
if compress > 15 or compress < 9:
raise WSHandshakeError('Invalid window size')
if match.group(2):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
# Fail if we are on the client side and the extension does not match
elif not isserver:
raise WSHandshakeError('Extension for deflate not supported' +
ext.group(1))
return compress, notakeover
def ws_ext_gen(compress: int=15, isserver: bool=False,
server_notakeover: bool=False) -> str:
# client_notakeover=False is not used for the server
# zlib does not support a window size of 8
if compress < 9 or compress > 15:
raise ValueError('Compress wbits must be between 9 and 15; '
'zlib does not support wbits=8')
enabledext = ['permessage-deflate']
if not isserver:
enabledext.append('client_max_window_bits')
if compress < 15:
enabledext.append('server_max_window_bits=' + str(compress))
if server_notakeover:
enabledext.append('server_no_context_takeover')
# if client_notakeover:
# enabledext.append('client_no_context_takeover')
return '; '.join(enabledext)
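# Illustrative usage sketch (not part of the vendored file): a client offer produced by
# ws_ext_gen() round-trips through ws_ext_parse() on the server side.
def _example_ws_extensions() -> None:
    offer = ws_ext_gen(compress=12, isserver=False)
    # offer == 'permessage-deflate; client_max_window_bits; server_max_window_bits=12'
    compress, notakeover = ws_ext_parse(offer, isserver=True)
    assert compress == 12 and notakeover is False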
class WSParserState(IntEnum):
READ_HEADER = 1
READ_PAYLOAD_LENGTH = 2
READ_PAYLOAD_MASK = 3
READ_PAYLOAD = 4
class WebSocketReader:
def __init__(self, queue: DataQueue[WSMessage],
max_msg_size: int, compress: bool=True) -> None:
self.queue = queue
self._max_msg_size = max_msg_size
self._exc = None # type: Optional[BaseException]
self._partial = bytearray()
self._state = WSParserState.READ_HEADER
self._opcode = None # type: Optional[int]
self._frame_fin = False
self._frame_opcode = None # type: Optional[int]
self._frame_payload = bytearray()
self._tail = b''
self._has_mask = False
self._frame_mask = None # type: Optional[bytes]
self._payload_length = 0
self._payload_length_flag = 0
self._compressed = None # type: Optional[bool]
self._decompressobj = None # type: Any # zlib.decompressobj actually
self._compress = compress
def feed_eof(self) -> None:
self.queue.feed_eof()
def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
if self._exc:
return True, data
try:
return self._feed_data(data)
except Exception as exc:
self._exc = exc
self.queue.set_exception(exc)
return True, b''
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
if compressed and not self._decompressobj:
self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
if (close_code < 3000 and
close_code not in ALLOWED_CLOSE_CODES):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close code: {}'.format(close_code))
try:
close_message = payload[2:].decode('utf-8')
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
elif payload:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close frame: {} {} {!r}'.format(
fin, opcode, payload))
else:
msg = WSMessage(WSMsgType.CLOSE, 0, '')
self.queue.feed_data(msg, 0)
elif opcode == WSMsgType.PING:
self.queue.feed_data(
WSMessage(WSMsgType.PING, payload, ''), len(payload))
elif opcode == WSMsgType.PONG:
self.queue.feed_data(
WSMessage(WSMsgType.PONG, payload, ''), len(payload))
elif opcode not in (
WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Unexpected opcode={!r}".format(opcode))
else:
# load text/binary
if not fin:
# got partial frame payload
if opcode != WSMsgType.CONTINUATION:
self._opcode = opcode
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
else:
# the previous frame was not finished,
# so we expect a continuation opcode
if self._partial:
if opcode != WSMsgType.CONTINUATION:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'The opcode in non-fin frame is expected '
'to be zero, got {!r}'.format(opcode))
if opcode == WSMsgType.CONTINUATION:
assert self._opcode is not None
opcode = self._opcode
self._opcode = None
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
# Decompression must be performed after all frames
# of the message have been received.
if compressed:
self._partial.extend(_WS_DEFLATE_TRAILING)
payload_merged = self._decompressobj.decompress(
self._partial, self._max_msg_size)
if self._decompressobj.unconsumed_tail:
left = len(self._decompressobj.unconsumed_tail)
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Decompressed message size exceeds limit {}".
format(self._max_msg_size + left,
self._max_msg_size))
else:
payload_merged = bytes(self._partial)
self._partial.clear()
if opcode == WSMsgType.TEXT:
try:
text = payload_merged.decode('utf-8')
self.queue.feed_data(
WSMessage(WSMsgType.TEXT, text, ''), len(text))
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
else:
self.queue.feed_data(
WSMessage(WSMsgType.BINARY, payload_merged, ''),
len(payload_merged))
return False, b''
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int],
bytearray,
Optional[bool]]]:
"""Return the next frame from the socket."""
frames = []
if self._tail:
buf, self._tail = self._tail + buf, b''
start_pos = 0
buf_length = len(buf)
while True:
# read header
if self._state == WSParserState.READ_HEADER:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
start_pos += 2
first_byte, second_byte = data
fin = (first_byte >> 7) & 1
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
# frame-fin = %x0 ; more frames of this message follow
# / %x1 ; final frame of this message
# frame-rsv1 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv2 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv3 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
#
# Remove rsv1 from this test for deflate development
if rsv2 or rsv3 or (rsv1 and not self._compress):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
if opcode > 0x7 and fin == 0:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received fragmented control frame')
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7f
# Control frames MUST have a payload
# length of 125 bytes or less
if opcode > 0x7 and length > 125:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Control frame payload cannot be '
'larger than 125 bytes')
# Set the compress flag if the previous frame was final (FIN)
# or if this is the first fragment of a message.
# Raise an error if a non-first fragment has rsv1 = 0x1.
if self._frame_fin or self._compressed is None:
self._compressed = True if rsv1 else False
elif rsv1:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
self._frame_fin = bool(fin)
self._frame_opcode = opcode
self._has_mask = bool(has_mask)
self._payload_length_flag = length
self._state = WSParserState.READ_PAYLOAD_LENGTH
else:
break
# read payload length
if self._state == WSParserState.READ_PAYLOAD_LENGTH:
length = self._payload_length_flag
if length == 126:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
start_pos += 2
length = UNPACK_LEN2(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else:
break
elif length > 126:
if buf_length - start_pos >= 8:
data = buf[start_pos:start_pos+8]
start_pos += 8
length = UNPACK_LEN3(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else:
break
else:
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
# read payload mask
if self._state == WSParserState.READ_PAYLOAD_MASK:
if buf_length - start_pos >= 4:
self._frame_mask = buf[start_pos:start_pos+4]
start_pos += 4
self._state = WSParserState.READ_PAYLOAD
else:
break
if self._state == WSParserState.READ_PAYLOAD:
length = self._payload_length
payload = self._frame_payload
chunk_len = buf_length - start_pos
if length >= chunk_len:
self._payload_length = length - chunk_len
payload.extend(buf[start_pos:])
start_pos = buf_length
else:
self._payload_length = 0
payload.extend(buf[start_pos:start_pos+length])
start_pos = start_pos + length
if self._payload_length == 0:
if self._has_mask:
assert self._frame_mask is not None
_websocket_mask(self._frame_mask, payload)
frames.append((
self._frame_fin,
self._frame_opcode,
payload,
self._compressed))
self._frame_payload = bytearray()
self._state = WSParserState.READ_HEADER
else:
break
self._tail = buf[start_pos:]
return frames
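# Illustrative sketch (not part of the original file): parse_frame can be
# exercised on its own with a hand-built, unmasked text frame. The queue is
# only used by feed_data, so a trivial stand-in object is enough here (an
# assumption made purely for this demonstration).
def _example_parse_frame() -> None:
    class _DummyQueue:
        def feed_data(self, *args: Any) -> None: pass
        def feed_eof(self) -> None: pass
        def set_exception(self, exc: BaseException) -> None: pass
    reader = WebSocketReader(_DummyQueue(),  # type: ignore
                             max_msg_size=4 * 1024 * 1024, compress=False)
    frame = b'\x81\x05hello'  # 0x81 = FIN + text opcode, 0x05 = length 5, no mask
    for fin, opcode, payload, compressed in reader.parse_frame(frame):
        print(fin, opcode, bytes(payload), compressed)  # True 1 b'hello' False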
class WebSocketWriter:
def __init__(self, protocol: BaseProtocol, transport: asyncio.Transport, *,
use_mask: bool=False, limit: int=DEFAULT_LIMIT,
random: Any=random.Random(),
compress: int=0, notakeover: bool=False) -> None:
self.protocol = protocol
self.transport = transport
self.use_mask = use_mask
self.randrange = random.randrange
self.compress = compress
self.notakeover = notakeover
self._closing = False
self._limit = limit
self._output_size = 0
self._compressobj = None # type: Any # actually compressobj
async def _send_frame(self, message: bytes, opcode: int,
compress: Optional[int]=None) -> None:
"""Send a frame over the websocket with message as its payload."""
if self._closing:
ws_logger.warning('websocket connection is closing.')
rsv = 0
# Only compress larger packets (disabled)
# Do small packets need to be compressed?
# if self.compress and opcode < 8 and len(message) > 124:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not reuse self._compressobj when compression is requested for this frame only
compressobj = zlib.compressobj(wbits=-compress)
else: # self.compress
if not self._compressobj:
self._compressobj = zlib.compressobj(wbits=-self.compress)
compressobj = self._compressobj
message = compressobj.compress(message)
message = message + compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
if message.endswith(_WS_DEFLATE_TRAILING):
message = message[:-4]
rsv = rsv | 0x40
msg_length = len(message)
use_mask = self.use_mask
if use_mask:
mask_bit = 0x80
else:
mask_bit = 0
if msg_length < 126:
header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
elif msg_length < (1 << 16):
header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
if use_mask:
mask = self.randrange(0, 0xffffffff)
mask = mask.to_bytes(4, 'big')
message = bytearray(message)
_websocket_mask(mask, message)
self.transport.write(header + mask + message)
self._output_size += len(header) + len(mask) + len(message)
else:
if len(message) > MSG_SIZE:
self.transport.write(header)
self.transport.write(message)
else:
self.transport.write(header + message)
self._output_size += len(header) + len(message)
if self._output_size > self._limit:
self._output_size = 0
await self.protocol._drain_helper()
async def pong(self, message: bytes=b'') -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode('utf-8')
await self._send_frame(message, WSMsgType.PONG)
async def ping(self, message: bytes=b'') -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode('utf-8')
await self._send_frame(message, WSMsgType.PING)
async def send(self, message: Union[str, bytes],
binary: bool=False,
compress: Optional[int]=None) -> None:
"""Send a frame over the websocket with message as its payload."""
if isinstance(message, str):
message = message.encode('utf-8')
if binary:
await self._send_frame(message, WSMsgType.BINARY, compress)
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
async def close(self, code: int=1000, message: bytes=b'') -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode('utf-8')
try:
await self._send_frame(
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE)
finally:
self._closing = True
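# Illustrative sketch (not part of the original file): the header layout built
# in _send_frame follows RFC 6455. PACK_LEN1/PACK_LEN2/PACK_LEN3 are defined
# earlier in this module; the struct formats used below ('!BB', '!BBH', '!BBQ')
# are an assumption made for this example.
def _example_frame_header(msg_length: int, opcode: int = 0x1,
                          rsv: int = 0, mask_bit: int = 0) -> bytes:
    import struct
    first_byte = 0x80 | rsv | opcode  # FIN bit set, reserved bits, opcode
    if msg_length < 126:
        return struct.pack('!BB', first_byte, msg_length | mask_bit)
    elif msg_length < (1 << 16):
        return struct.pack('!BBH', first_byte, 126 | mask_bit, msg_length)
    else:
        return struct.pack('!BBQ', first_byte, 127 | mask_bit, msg_length)
# _example_frame_header(5) == b'\x81\x05', the same header parsed in the
# reader sketch above.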

View file

@ -0,0 +1,172 @@
"""Http related parsers and protocol."""
import asyncio
import collections
import zlib
from typing import Any, Awaitable, Callable, Optional, Union # noqa
from multidict import CIMultiDict # noqa
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
__all__ = ('StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11')
HttpVersion = collections.namedtuple('HttpVersion', ['major', 'minor'])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
class StreamWriter(AbstractStreamWriter):
def __init__(self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None) -> None:
self._protocol = protocol
self._transport = protocol.transport
self.loop = loop
self.length = None
self.chunked = False
self.buffer_size = 0
self.output_size = 0
self._eof = False
self._compress = None # type: Any
self._drain_waiter = None
self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent
@property
def transport(self) -> Optional[asyncio.Transport]:
return self._transport
@property
def protocol(self) -> BaseProtocol:
return self._protocol
def enable_chunking(self) -> None:
self.chunked = True
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self._compress = zlib.compressobj(wbits=zlib_mode)
def _write(self, chunk: bytes) -> None:
size = len(chunk)
self.buffer_size += size
self.output_size += size
if self._transport is None or self._transport.is_closing():
raise ConnectionResetError('Cannot write to closing transport')
self._transport.write(chunk)
async def write(self, chunk: bytes,
*, drain: bool=True, LIMIT: int=0x10000) -> None:
"""Writes chunk of data to a stream.
write_eof() indicates end of stream.
writer can't be used after write_eof() method being called.
write() return drain future.
"""
if self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if self._compress is not None:
chunk = self._compress.compress(chunk)
if not chunk:
return
if self.length is not None:
chunk_len = len(chunk)
if self.length >= chunk_len:
self.length = self.length - chunk_len
else:
chunk = chunk[:self.length]
self.length = 0
if not chunk:
return
if chunk:
if self.chunked:
chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len_pre + chunk + b'\r\n'
self._write(chunk)
if self.buffer_size > LIMIT and drain:
self.buffer_size = 0
await self.drain()
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
"""Write request/response status and headers."""
# status + headers
buf = _serialize_headers(status_line, headers)
self._write(buf)
async def write_eof(self, chunk: bytes=b'') -> None:
if self._eof:
return
if chunk and self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if self._compress:
if chunk:
chunk = self._compress.compress(chunk)
chunk = chunk + self._compress.flush()
if chunk and self.chunked:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
else:
if self.chunked:
if chunk:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
else:
chunk = b'0\r\n\r\n'
if chunk:
self._write(chunk)
await self.drain()
self._eof = True
self._transport = None
async def drain(self) -> None:
"""Flush the write buffer.
The intended use is to write
await w.write(data)
await w.drain()
"""
if self._protocol.transport is not None:
await self._protocol._drain_helper()
def _py_serialize_headers(status_line: str,
headers: 'CIMultiDict[str]') -> bytes:
line = status_line + '\r\n' + ''.join(
[k + ': ' + v + '\r\n' for k, v in headers.items()])
return line.encode('utf-8') + b'\r\n'
_serialize_headers = _py_serialize_headers
try:
import aiohttp._http_writer as _http_writer # type: ignore
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
_serialize_headers = _c_serialize_headers
except ImportError:
pass
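# Illustrative sketch (not part of the original file): with enable_chunking()
# every chunk passed to StreamWriter.write is framed as
# '<hex length>\r\n<chunk>\r\n' and write_eof() terminates the body with
# '0\r\n\r\n'. The helper below reproduces that framing on plain bytes.
def _example_chunked_framing(chunks: list) -> bytes:
    out = b''
    for chunk in chunks:
        out += ('%x\r\n' % len(chunk)).encode('ascii') + chunk + b'\r\n'
    return out + b'0\r\n\r\n'
# _example_chunked_framing([b'hello', b'world!']) ==
#     b'5\r\nhello\r\n6\r\nworld!\r\n0\r\n\r\n'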

View file

@ -0,0 +1,44 @@
import asyncio
import collections
from typing import Any, Optional
try:
from typing import Deque
except ImportError:
from typing_extensions import Deque # noqa
class EventResultOrError:
"""
This class wrappers the Event asyncio lock allowing either awake the
locked Tasks without any error or raising an exception.
thanks to @vorpalsmith for the simple design.
"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._exc = None # type: Optional[BaseException]
self._event = asyncio.Event(loop=loop)
self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
def set(self, exc: Optional[BaseException]=None) -> None:
self._exc = exc
self._event.set()
async def wait(self) -> Any:
waiter = self._loop.create_task(self._event.wait())
self._waiters.append(waiter)
try:
val = await waiter
finally:
self._waiters.remove(waiter)
if self._exc is not None:
raise self._exc
return val
def cancel(self) -> None:
""" Cancel all waiters """
for waiter in self._waiters:
waiter.cancel()
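# Illustrative sketch (not part of the original file): one task awaits wait()
# while another releases it with set(), or propagates a failure by passing an
# exception to set(exc).
async def _example_event_result_or_error() -> None:
    loop = asyncio.get_event_loop()
    ev = EventResultOrError(loop)

    async def waiter() -> str:
        await ev.wait()
        return 'released'

    task = loop.create_task(waiter())
    await asyncio.sleep(0)  # let the waiter start waiting
    ev.set()                # ev.set(RuntimeError('boom')) would raise in wait()
    print(await task)       # 'released'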

View file

@ -0,0 +1,8 @@
import logging
access_logger = logging.getLogger('aiohttp.access')
client_logger = logging.getLogger('aiohttp.client')
internal_logger = logging.getLogger('aiohttp.internal')
server_logger = logging.getLogger('aiohttp.server')
web_logger = logging.getLogger('aiohttp.web')
ws_logger = logging.getLogger('aiohttp.websocket')

View file

@ -0,0 +1,937 @@
import base64
import binascii
import json
import re
import uuid
import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import ( # noqa
TYPE_CHECKING,
Any,
Dict,
Iterator,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from urllib.parse import parse_qsl, unquote, urlencode
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping # noqa
from .hdrs import (
CONTENT_DISPOSITION,
CONTENT_ENCODING,
CONTENT_LENGTH,
CONTENT_TRANSFER_ENCODING,
CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .payload import (
JsonPayload,
LookupError,
Order,
Payload,
StringPayload,
get_payload,
payload_type,
)
from .streams import StreamReader
__all__ = ('MultipartReader', 'MultipartWriter', 'BodyPartReader',
'BadContentDispositionHeader', 'BadContentDispositionParam',
'parse_content_disposition', 'content_disposition_filename')
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import ClientResponse # noqa
class BadContentDispositionHeader(RuntimeWarning):
pass
class BadContentDispositionParam(RuntimeWarning):
pass
def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
Dict[str, str]]:
def is_token(string: str) -> bool:
return bool(string) and TOKEN >= set(string)
def is_quoted(string: str) -> bool:
return string[0] == string[-1] == '"'
def is_rfc5987(string: str) -> bool:
return is_token(string) and string.count("'") == 2
def is_extended_param(string: str) -> bool:
return string.endswith('*')
def is_continuous_param(string: str) -> bool:
pos = string.find('*') + 1
if not pos:
return False
substring = string[pos:-1] if string.endswith('*') else string[pos:]
return substring.isdigit()
def unescape(text: str, *,
chars: str=''.join(map(re.escape, CHAR))) -> str:
return re.sub('\\\\([{}])'.format(chars), '\\1', text)
if not header:
return None, {}
disptype, *parts = header.split(';')
if not is_token(disptype):
warnings.warn(BadContentDispositionHeader(header))
return None, {}
params = {} # type: Dict[str, str]
while parts:
item = parts.pop(0)
if '=' not in item:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
key, value = item.split('=', 1)
key = key.lower().strip()
value = value.lstrip()
if key in params:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
if not is_token(key):
warnings.warn(BadContentDispositionParam(item))
continue
elif is_continuous_param(key):
if is_quoted(value):
value = unescape(value[1:-1])
elif not is_token(value):
warnings.warn(BadContentDispositionParam(item))
continue
elif is_extended_param(key):
if is_rfc5987(value):
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
else:
warnings.warn(BadContentDispositionParam(item))
continue
try:
value = unquote(value, encoding, 'strict')
except UnicodeDecodeError: # pragma: nocover
warnings.warn(BadContentDispositionParam(item))
continue
else:
failed = True
if is_quoted(value):
failed = False
value = unescape(value[1:-1].lstrip('\\/'))
elif is_token(value):
failed = False
elif parts:
# maybe there is just a ';' inside the filename; in any case this only
# fixes one case, a proper fix requires redesigning the parser
_value = '%s;%s' % (value, parts[0])
if is_quoted(_value):
parts.pop(0)
value = unescape(_value[1:-1].lstrip('\\/'))
failed = False
if failed:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
params[key] = value
return disptype.lower(), params
def content_disposition_filename(params: Mapping[str, str],
name: str='filename') -> Optional[str]:
name_suf = '%s*' % name
if not params:
return None
elif name_suf in params:
return params[name_suf]
elif name in params:
return params[name]
else:
parts = []
fnparams = sorted((key, value)
for key, value in params.items()
if key.startswith(name_suf))
for num, (key, value) in enumerate(fnparams):
_, tail = key.split('*', 1)
if tail.endswith('*'):
tail = tail[:-1]
if tail == str(num):
parts.append(value)
else:
break
if not parts:
return None
value = ''.join(parts)
if "'" in value:
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
return unquote(value, encoding, 'strict')
return value
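# Illustrative sketch (not part of the original file): parsing a typical
# Content-Disposition header value with the two helpers above.
def _example_content_disposition() -> None:
    header = 'attachment; filename="report.pdf"; name="field1"'
    disptype, params = parse_content_disposition(header)
    print(disptype)                                      # 'attachment'
    print(content_disposition_filename(params))          # 'report.pdf'
    print(content_disposition_filename(params, 'name'))  # 'field1'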
class MultipartResponseWrapper:
"""Wrapper around the MultipartBodyReader.
It takes care about
underlying connection and close it when it needs in.
"""
def __init__(self, resp: 'ClientResponse', stream: Any) -> None:
# TODO: add strong annotation to stream
self.resp = resp
self.stream = stream
def __aiter__(self) -> 'MultipartResponseWrapper':
return self
async def __anext__(self) -> Any:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
return part
def at_eof(self) -> bool:
"""Returns True when all response data had been read."""
return self.resp.content.at_eof()
async def next(self) -> Any:
"""Emits next multipart reader object."""
item = await self.stream.next()
if self.stream.at_eof():
await self.release()
return item
async def release(self) -> None:
"""Releases the connection gracefully, reading all the content
to the void."""
await self.resp.release()
class BodyPartReader:
"""Multipart reader for single body part."""
chunk_size = 8192
def __init__(self, boundary: bytes,
headers: Mapping[str, Optional[str]],
content: StreamReader) -> None:
self.headers = headers
self._boundary = boundary
self._content = content
self._at_eof = False
length = self.headers.get(CONTENT_LENGTH, None)
self._length = int(length) if length is not None else None
self._read_bytes = 0
# TODO: typing.Deque is not supported by Python 3.5
self._unread = deque() # type: Any
self._prev_chunk = None # type: Optional[bytes]
self._content_eof = 0
self._cache = {} # type: Dict[str, Any]
def __aiter__(self) -> 'BodyPartReader':
return self
async def __anext__(self) -> Any:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
return part
async def next(self) -> Any:
item = await self.read()
if not item:
return None
return item
async def read(self, *, decode: bool=False) -> Any:
"""Reads body part data.
decode: if True, decodes the data according to the encoding
specified in the Content-Encoding header. If the header is
missing, the data is returned untouched.
"""
if self._at_eof:
return b''
data = bytearray()
while not self._at_eof:
data.extend((await self.read_chunk(self.chunk_size)))
if decode:
return self.decode(data)
return data
async def read_chunk(self, size: int=chunk_size) -> bytes:
"""Reads body part content chunk of the specified size.
size: chunk size
"""
if self._at_eof:
return b''
if self._length:
chunk = await self._read_chunk_from_length(size)
else:
chunk = await self._read_chunk_from_stream(size)
self._read_bytes += len(chunk)
if self._read_bytes == self._length:
self._at_eof = True
if self._at_eof:
clrf = await self._content.readline()
assert b'\r\n' == clrf, \
'reader did not read all the data or it is malformed'
return chunk
async def _read_chunk_from_length(self, size: int) -> bytes:
# Reads body part content chunk of the specified size.
# The body part must have a Content-Length header with a proper value.
assert self._length is not None, \
'Content-Length required for chunked read'
chunk_size = min(size, self._length - self._read_bytes)
chunk = await self._content.read(chunk_size)
return chunk
async def _read_chunk_from_stream(self, size: int) -> bytes:
# Reads content chunk of body part with unknown length.
# The Content-Length header for body part is not necessary.
assert size >= len(self._boundary) + 2, \
'Chunk size must be greater than or equal to boundary length + 2'
first_chunk = self._prev_chunk is None
if first_chunk:
self._prev_chunk = await self._content.read(size)
chunk = await self._content.read(size)
self._content_eof += int(self._content.at_eof())
assert self._content_eof < 3, "Reading after EOF"
assert self._prev_chunk is not None
window = self._prev_chunk + chunk
sub = b'\r\n' + self._boundary
if first_chunk:
idx = window.find(sub)
else:
idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
if idx >= 0:
# pushing boundary back to content
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
category=DeprecationWarning)
self._content.unread_data(window[idx:])
if size > idx:
self._prev_chunk = self._prev_chunk[:idx]
chunk = window[len(self._prev_chunk):idx]
if not chunk:
self._at_eof = True
result = self._prev_chunk
self._prev_chunk = chunk
return result
async def readline(self) -> bytes:
"""Reads body part by line by line."""
if self._at_eof:
return b''
if self._unread:
line = self._unread.popleft()
else:
line = await self._content.readline()
if line.startswith(self._boundary):
# the very last boundary may not come with \r\n,
# so handle both cases uniformly
sline = line.rstrip(b'\r\n')
boundary = self._boundary
last_boundary = self._boundary + b'--'
# ensure that we read exactly the boundary, not something alike
if sline == boundary or sline == last_boundary:
self._at_eof = True
self._unread.append(line)
return b''
else:
next_line = await self._content.readline()
if next_line.startswith(self._boundary):
line = line[:-2] # strip CRLF but only once
self._unread.append(next_line)
return line
async def release(self) -> None:
"""Like read(), but reads all the data to the void."""
if self._at_eof:
return
while not self._at_eof:
await self.read_chunk(self.chunk_size)
async def text(self, *, encoding: Optional[str]=None) -> str:
"""Like read(), but assumes that body part contains text data."""
data = await self.read(decode=True)
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
encoding = encoding or self.get_charset(default='utf-8')
return data.decode(encoding)
async def json(self, *, encoding: Optional[str]=None) -> Any:
"""Like read(), but assumes that body parts contains JSON data."""
data = await self.read(decode=True)
if not data:
return None
encoding = encoding or self.get_charset(default='utf-8')
return json.loads(data.decode(encoding))
async def form(self, *,
encoding: Optional[str]=None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contains form
urlencoded data.
"""
data = await self.read(decode=True)
if not data:
return []
if encoding is not None:
real_encoding = encoding
else:
real_encoding = self.get_charset(default='utf-8')
return parse_qsl(data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding)
def at_eof(self) -> bool:
"""Returns True if the boundary was reached or False otherwise."""
return self._at_eof
def decode(self, data: bytes) -> bytes:
"""Decodes data according the specified Content-Encoding
or Content-Transfer-Encoding headers value.
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
data = self._decode_content_transfer(data)
if CONTENT_ENCODING in self.headers:
return self._decode_content(data)
return data
def _decode_content(self, data: bytes) -> bytes:
encoding = cast(str, self.headers[CONTENT_ENCODING]).lower()
if encoding == 'deflate':
return zlib.decompress(data, -zlib.MAX_WBITS)
elif encoding == 'gzip':
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
elif encoding == 'identity':
return data
else:
raise RuntimeError('unknown content encoding: {}'.format(encoding))
def _decode_content_transfer(self, data: bytes) -> bytes:
encoding = cast(str, self.headers[CONTENT_TRANSFER_ENCODING]).lower()
if encoding == 'base64':
return base64.b64decode(data)
elif encoding == 'quoted-printable':
return binascii.a2b_qp(data)
elif encoding in ('binary', '8bit', '7bit'):
return data
else:
raise RuntimeError('unknown content transfer encoding: {}'
''.format(encoding))
def get_charset(self, default: str) -> str:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, '')
mimetype = parse_mimetype(ctype)
return mimetype.parameters.get('charset', default)
@reify
def name(self) -> Optional[str]:
"""Returns name specified in Content-Disposition header or None
if missed or header is malformed.
"""
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'name')
@reify
def filename(self) -> Optional[str]:
"""Returns filename specified in Content-Disposition header or None
if missed or header is malformed.
"""
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'filename')
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
def __init__(self, value: BodyPartReader,
*args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
params = {} # type: Dict[str, str]
if value.name is not None:
params['name'] = value.name
if value.filename is not None:
params['filename'] = value.filename
if params:
self.set_content_disposition('attachment', True, **params)
async def write(self, writer: Any) -> None:
field = self._value
chunk = await field.read_chunk(size=2**16)
while chunk:
await writer.write(field.decode(chunk))
chunk = await field.read_chunk(size=2**16)
class MultipartReader:
"""Multipart body reader."""
#: Response wrapper, used when the multipart reader is constructed from a response.
response_wrapper_cls = MultipartResponseWrapper
#: Multipart reader class, used to handle multipart/* body parts.
#: None points to type(self)
multipart_reader_cls = None
#: Body part reader class for non multipart/* content types.
part_reader_cls = BodyPartReader
def __init__(self, headers: Mapping[str, str],
content: StreamReader) -> None:
self.headers = headers
self._boundary = ('--' + self._get_boundary()).encode()
self._content = content
self._last_part = None
self._at_eof = False
self._at_bof = True
self._unread = [] # type: List[bytes]
def __aiter__(self) -> 'MultipartReader':
return self
async def __anext__(self) -> Any:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
return part
@classmethod
def from_response(cls, response: 'ClientResponse') -> Any:
"""Constructs reader instance from HTTP response.
:param response: :class:`~aiohttp.client.ClientResponse` instance
"""
obj = cls.response_wrapper_cls(response, cls(response.headers,
response.content))
return obj
def at_eof(self) -> bool:
"""Returns True if the final boundary was reached or
False otherwise.
"""
return self._at_eof
async def next(self) -> Any:
"""Emits the next multipart body part."""
# So, if we're at BOF, we need to skip till the boundary.
if self._at_eof:
return
await self._maybe_release_last_part()
if self._at_bof:
await self._read_until_first_boundary()
self._at_bof = False
else:
await self._read_boundary()
if self._at_eof: # we just read the last boundary, nothing to do there
return
self._last_part = await self.fetch_next_part()
return self._last_part
async def release(self) -> None:
"""Reads all the body parts to the void till the final boundary."""
while not self._at_eof:
item = await self.next()
if item is None:
break
await item.release()
async def fetch_next_part(self) -> Any:
"""Returns the next body part reader."""
headers = await self._read_headers()
return self._get_part_reader(headers)
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any:
"""Dispatches the response by the `Content-Type` header, returning
suitable reader instance.
:param dict headers: Response headers
"""
ctype = headers.get(CONTENT_TYPE, '')
mimetype = parse_mimetype(ctype)
if mimetype.type == 'multipart':
if self.multipart_reader_cls is None:
return type(self)(headers, self._content)
return self.multipart_reader_cls(headers, self._content)
else:
return self.part_reader_cls(self._boundary, headers, self._content)
def _get_boundary(self) -> str:
mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
assert mimetype.type == 'multipart', (
'multipart/* content type expected'
)
if 'boundary' not in mimetype.parameters:
raise ValueError('boundary missing for Content-Type: %s'
% self.headers[CONTENT_TYPE])
boundary = mimetype.parameters['boundary']
if len(boundary) > 70:
raise ValueError('boundary %r is too long (70 chars max)'
% boundary)
return boundary
async def _readline(self) -> bytes:
if self._unread:
return self._unread.pop()
return await self._content.readline()
async def _read_until_first_boundary(self) -> None:
while True:
chunk = await self._readline()
if chunk == b'':
raise ValueError("Could not find starting boundary %r"
% (self._boundary))
chunk = chunk.rstrip()
if chunk == self._boundary:
return
elif chunk == self._boundary + b'--':
self._at_eof = True
return
async def _read_boundary(self) -> None:
chunk = (await self._readline()).rstrip()
if chunk == self._boundary:
pass
elif chunk == self._boundary + b'--':
self._at_eof = True
epilogue = await self._readline()
next_line = await self._readline()
# the epilogue is expected, followed by either the end of input or
# the parent multipart boundary; if the parent boundary is found,
# it should be marked as unread and handed to the parent for
# processing
if next_line[:2] == b'--':
self._unread.append(next_line)
# otherwise the request is likely missing an epilogue and both
# lines should be passed to the parent for processing
# (this handles the old behavior gracefully)
else:
self._unread.extend([next_line, epilogue])
else:
raise ValueError('Invalid boundary %r, expected %r'
% (chunk, self._boundary))
async def _read_headers(self) -> 'CIMultiDictProxy[str]':
lines = [b'']
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
lines.append(chunk)
if not chunk:
break
parser = HeadersParser()
headers, raw_headers = parser.parse_headers(lines)
return headers
async def _maybe_release_last_part(self) -> None:
"""Ensures that the last read body part is read completely."""
if self._last_part is not None:
if not self._last_part.at_eof():
await self._last_part.release()
self._unread.extend(self._last_part._unread)
self._last_part = None
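# Illustrative sketch (not part of the original file): iterating over the body
# parts of a multipart response. 'resp' is assumed to be an
# aiohttp.ClientResponse whose Content-Type is multipart/*; nested multipart
# parts are ignored here for brevity.
async def _example_read_multipart(resp: Any) -> None:
    reader = MultipartReader.from_response(resp)
    async for part in reader:
        if isinstance(part, BodyPartReader):
            if part.filename:
                data = await part.read(decode=True)
                print(part.filename, len(data))
            else:
                print(part.name, await part.text())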
_Part = Tuple[Payload, str, str]
class MultipartWriter(Payload):
"""Multipart body writer."""
def __init__(self, subtype: str='mixed',
boundary: Optional[str]=None) -> None:
boundary = boundary if boundary is not None else uuid.uuid4().hex
# The underlying Payload API demands a str (utf-8), not bytes,
# so we need to ensure we don't lose anything during conversion.
# As a result, require the boundary to be ASCII-only in both situations.
try:
self._boundary = boundary.encode('ascii')
except UnicodeEncodeError:
raise ValueError('boundary should contain ASCII only chars') \
from None
ctype = ('multipart/{}; boundary={}'
.format(subtype, self._boundary_value))
super().__init__(None, content_type=ctype)
self._parts = [] # type: List[_Part] # noqa
def __enter__(self) -> 'MultipartWriter':
return self
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
pass
def __iter__(self) -> Iterator[_Part]:
return iter(self._parts)
def __len__(self) -> int:
return len(self._parts)
_valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
_invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
@property
def _boundary_value(self) -> str:
"""Wrap boundary parameter value in quotes, if necessary.
Reads self.boundary and returns a unicode string.
"""
# Refer to RFCs 7231, 7230, 5234.
#
# parameter = token "=" ( token / quoted-string )
# token = 1*tchar
# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
# obs-text = %x80-FF
# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
# / DIGIT / ALPHA
# ; any VCHAR, except delimiters
# VCHAR = %x21-7E
value = self._boundary
if re.match(self._valid_tchar_regex, value):
return value.decode('ascii') # cannot fail
if re.search(self._invalid_qdtext_char_regex, value):
raise ValueError("boundary value contains invalid characters")
# escape %x5C and %x22
quoted_value_content = value.replace(b'\\', b'\\\\')
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
return '"' + quoted_value_content.decode('ascii') + '"'
@property
def boundary(self) -> str:
return self._boundary.decode('ascii')
def append(
self,
obj: Any,
headers: Optional['MultiMapping[str]']=None
) -> Payload:
if headers is None:
headers = CIMultiDict()
if isinstance(obj, Payload):
obj.headers.update(headers)
return self.append_payload(obj)
else:
try:
payload = get_payload(obj, headers=headers)
except LookupError:
raise TypeError('Cannot create payload from %r' % obj)
else:
return self.append_payload(payload)
def append_payload(self, payload: Payload) -> Payload:
"""Adds a new body part to multipart writer."""
# compression
encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa
if encoding and encoding not in ('deflate', 'gzip', 'identity'):
raise RuntimeError('unknown content encoding: {}'.format(encoding))
if encoding == 'identity':
encoding = None
# te encoding
te_encoding = payload.headers.get(
CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa
if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
raise RuntimeError('unknown content transfer encoding: {}'
''.format(te_encoding))
if te_encoding == 'binary':
te_encoding = None
# size
size = payload.size
if size is not None and not (encoding or te_encoding):
payload.headers[CONTENT_LENGTH] = str(size)
self._parts.append((payload, encoding, te_encoding)) # type: ignore
return payload
def append_json(
self,
obj: Any,
headers: Optional['MultiMapping[str]']=None
) -> Payload:
"""Helper to append JSON part."""
if headers is None:
headers = CIMultiDict()
return self.append_payload(JsonPayload(obj, headers=headers))
def append_form(
self,
obj: Union[Sequence[Tuple[str, str]],
Mapping[str, str]],
headers: Optional['MultiMapping[str]']=None
) -> Payload:
"""Helper to append form urlencoded part."""
assert isinstance(obj, (Sequence, Mapping))
if headers is None:
headers = CIMultiDict()
if isinstance(obj, Mapping):
obj = list(obj.items())
data = urlencode(obj, doseq=True)
return self.append_payload(
StringPayload(data, headers=headers,
content_type='application/x-www-form-urlencoded'))
@property
def size(self) -> Optional[int]:
"""Size of the payload."""
if not self._parts:
return 0
total = 0
for part, encoding, te_encoding in self._parts:
if encoding or te_encoding or part.size is None:
return None
total += int(
2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n'
part.size + len(part._binary_headers) +
2 # b'\r\n'
)
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
return total
async def write(self, writer: Any,
close_boundary: bool=True) -> None:
"""Write body."""
if not self._parts:
return
for part, encoding, te_encoding in self._parts:
await writer.write(b'--' + self._boundary + b'\r\n')
await writer.write(part._binary_headers)
if encoding or te_encoding:
w = MultipartPayloadWriter(writer)
if encoding:
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore
await w.write_eof()
else:
await part.write(writer)
await writer.write(b'\r\n')
if close_boundary:
await writer.write(b'--' + self._boundary + b'--\r\n')
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
self._writer = writer
self._encoding = None # type: Optional[str]
self._compress = None # type: Any
self._encoding_buffer = None # type: Optional[bytearray]
def enable_encoding(self, encoding: str) -> None:
if encoding == 'base64':
self._encoding = encoding
self._encoding_buffer = bytearray()
elif encoding == 'quoted-printable':
self._encoding = 'quoted-printable'
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
self._compress = zlib.compressobj(wbits=zlib_mode)
async def write_eof(self) -> None:
if self._compress is not None:
chunk = self._compress.flush()
if chunk:
self._compress = None
await self.write(chunk)
if self._encoding == 'base64':
if self._encoding_buffer:
await self._writer.write(base64.b64encode(
self._encoding_buffer))
async def write(self, chunk: bytes) -> None:
if self._compress is not None:
if chunk:
chunk = self._compress.compress(chunk)
if not chunk:
return
if self._encoding == 'base64':
buf = self._encoding_buffer
assert buf is not None
buf.extend(chunk)
if buf:
div, mod = divmod(len(buf), 3)
enc_chunk, self._encoding_buffer = (
buf[:div * 3], buf[div * 3:])
if enc_chunk:
b64chunk = base64.b64encode(enc_chunk)
await self._writer.write(b64chunk)
elif self._encoding == 'quoted-printable':
await self._writer.write(binascii.b2a_qp(chunk))
else:
await self._writer.write(chunk)
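# Illustrative sketch (not part of the original file): assembling a multipart
# body with MultipartWriter. The parts are written into a small in-memory
# writer here purely to show the produced wire format; with aiohttp the writer
# object itself is normally passed as the request data instead.
async def _example_multipart_writer() -> None:
    class _BufferWriter:
        def __init__(self) -> None:
            self.buffer = bytearray()

        async def write(self, chunk: bytes) -> None:
            self.buffer.extend(chunk)

    with MultipartWriter('form-data', boundary='example-boundary') as mpwriter:
        mpwriter.append('plain text')
        mpwriter.append_json({'key': 'value'})
        mpwriter.append_form([('field', '42')])
    sink = _BufferWriter()
    await mpwriter.write(sink)
    print(sink.buffer.decode('utf-8', 'replace'))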

View file

@ -0,0 +1,456 @@
import asyncio
import enum
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from itertools import chain
from typing import (
IO,
TYPE_CHECKING,
Any,
ByteString,
Dict,
Iterable,
Optional,
Text,
TextIO,
Tuple,
Type,
Union,
)
from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
PY_36,
content_disposition_header,
guess_filename,
parse_mimetype,
sentinel,
)
from .streams import DEFAULT_LIMIT, StreamReader
from .typedefs import JSONEncoder, _CIMultiDict
__all__ = ('PAYLOAD_REGISTRY', 'get_payload', 'payload_type', 'Payload',
'BytesPayload', 'StringPayload',
'IOBasePayload', 'BytesIOPayload', 'BufferedReaderPayload',
'TextIOPayload', 'StringIOPayload', 'JsonPayload',
'AsyncIterablePayload')
TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB
if TYPE_CHECKING: # pragma: no cover
from typing import List # noqa
class LookupError(Exception):
pass
class Order(str, enum.Enum):
normal = 'normal'
try_first = 'try_first'
try_last = 'try_last'
def get_payload(data: Any, *args: Any, **kwargs: Any) -> 'Payload':
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
def register_payload(factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
PAYLOAD_REGISTRY.register(factory, type, order=order)
class payload_type:
def __init__(self, type: Any, *, order: Order=Order.normal) -> None:
self.type = type
self.order = order
def __call__(self, factory: Type['Payload']) -> Type['Payload']:
register_payload(factory, self.type, order=self.order)
return factory
class PayloadRegistry:
"""Payload registry.
note: we need zope.interface for more efficient adapter search
"""
def __init__(self) -> None:
self._first = [] # type: List[Tuple[Type[Payload], Any]]
self._normal = [] # type: List[Tuple[Type[Payload], Any]]
self._last = [] # type: List[Tuple[Type[Payload], Any]]
def get(self,
data: Any,
*args: Any,
_CHAIN: Any=chain,
**kwargs: Any) -> 'Payload':
if isinstance(data, Payload):
return data
for factory, type in _CHAIN(self._first, self._normal, self._last):
if isinstance(data, type):
return factory(data, *args, **kwargs)
raise LookupError()
def register(self,
factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
if order is Order.try_first:
self._first.append((factory, type))
elif order is Order.normal:
self._normal.append((factory, type))
elif order is Order.try_last:
self._last.append((factory, type))
else:
raise ValueError("Unsupported order {!r}".format(order))
class Payload(ABC):
_default_content_type = 'application/octet-stream' # type: str
_size = None # type: Optional[int]
def __init__(self,
value: Any,
headers: Optional[
Union[
_CIMultiDict,
Dict[str, str],
Iterable[Tuple[str, str]]
]
] = None,
content_type: Optional[str]=sentinel,
filename: Optional[str]=None,
encoding: Optional[str]=None,
**kwargs: Any) -> None:
self._encoding = encoding
self._filename = filename
self._headers = CIMultiDict() # type: _CIMultiDict
self._value = value
if content_type is not sentinel and content_type is not None:
self._headers[hdrs.CONTENT_TYPE] = content_type
elif self._filename is not None:
content_type = mimetypes.guess_type(self._filename)[0]
if content_type is None:
content_type = self._default_content_type
self._headers[hdrs.CONTENT_TYPE] = content_type
else:
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
self._headers.update(headers or {})
@property
def size(self) -> Optional[int]:
"""Size of the payload."""
return self._size
@property
def filename(self) -> Optional[str]:
"""Filename of the payload."""
return self._filename
@property
def headers(self) -> _CIMultiDict:
"""Custom item headers"""
return self._headers
@property
def _binary_headers(self) -> bytes:
return ''.join(
[k + ': ' + v + '\r\n' for k, v in self.headers.items()]
).encode('utf-8') + b'\r\n'
@property
def encoding(self) -> Optional[str]:
"""Payload encoding"""
return self._encoding
@property
def content_type(self) -> str:
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
def set_content_disposition(self,
disptype: str,
quote_fields: bool=True,
**params: Any) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
disptype, quote_fields=quote_fields, **params)
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
"""Write payload.
writer is an AbstractStreamWriter instance:
"""
class BytesPayload(Payload):
def __init__(self,
value: ByteString,
*args: Any,
**kwargs: Any) -> None:
if not isinstance(value, (bytes, bytearray, memoryview)):
raise TypeError("value argument must be byte-ish, not (!r)"
.format(type(value)))
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
super().__init__(value, *args, **kwargs)
self._size = len(value)
if self._size > TOO_LARGE_BYTES_BODY:
if PY_36:
kwargs = {'source': self}
else:
kwargs = {}
warnings.warn("Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead", ResourceWarning,
**kwargs)
async def write(self, writer: AbstractStreamWriter) -> None:
await writer.write(self._value)
class StringPayload(BytesPayload):
def __init__(self,
value: Text,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
if encoding is None:
if content_type is None:
real_encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
else:
mimetype = parse_mimetype(content_type)
real_encoding = mimetype.parameters.get('charset', 'utf-8')
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
real_encoding = encoding
super().__init__(
value.encode(real_encoding),
encoding=real_encoding,
content_type=content_type,
*args,
**kwargs,
)
class StringIOPayload(StringPayload):
def __init__(self,
value: IO[str],
*args: Any,
**kwargs: Any) -> None:
super().__init__(value.read(), *args, **kwargs)
class IOBasePayload(Payload):
def __init__(self,
value: IO[Any],
disposition: str='attachment',
*args: Any,
**kwargs: Any) -> None:
if 'filename' not in kwargs:
kwargs['filename'] = guess_filename(value)
super().__init__(value, *args, **kwargs)
if self._filename is not None and disposition is not None:
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(
disposition, filename=self._filename
)
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
while chunk:
await writer.write(chunk)
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
finally:
await loop.run_in_executor(None, self._value.close)
class TextIOPayload(IOBasePayload):
def __init__(self,
value: TextIO,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
if encoding is None:
if content_type is None:
encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
else:
mimetype = parse_mimetype(content_type)
encoding = mimetype.parameters.get('charset', 'utf-8')
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
super().__init__(
value,
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
@property
def size(self) -> Optional[int]:
try:
return os.fstat(self._value.fileno()).st_size - self._value.tell()
except OSError:
return None
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
while chunk:
await writer.write(chunk.encode(self._encoding))
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
finally:
await loop.run_in_executor(None, self._value.close)
class BytesIOPayload(IOBasePayload):
@property
def size(self) -> int:
position = self._value.tell()
end = self._value.seek(0, os.SEEK_END)
self._value.seek(position)
return end - position
class BufferedReaderPayload(IOBasePayload):
@property
def size(self) -> Optional[int]:
try:
return os.fstat(self._value.fileno()).st_size - self._value.tell()
except OSError:
# data.fileno() is not supported, e.g.
# io.BufferedReader(io.BytesIO(b'data'))
return None
class JsonPayload(BytesPayload):
def __init__(self,
value: Any,
encoding: str='utf-8',
content_type: str='application/json',
dumps: JSONEncoder=json.dumps,
*args: Any,
**kwargs: Any) -> None:
super().__init__(
dumps(value).encode(encoding),
content_type=content_type, encoding=encoding, *args, **kwargs)
if TYPE_CHECKING: # pragma: no cover
from typing import AsyncIterator, AsyncIterable
_AsyncIterator = AsyncIterator[bytes]
_AsyncIterable = AsyncIterable[bytes]
else:
from collections.abc import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator
_AsyncIterable = AsyncIterable
class AsyncIterablePayload(Payload):
_iter = None # type: Optional[_AsyncIterator]
def __init__(self,
value: _AsyncIterable,
*args: Any,
**kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError("value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value)))
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
super().__init__(value, *args, **kwargs)
self._iter = value.__aiter__()
async def write(self, writer: AbstractStreamWriter) -> None:
if self._iter:
try:
# the 'iter is not None' check prevents rare cases
# where the iterable is consumed twice
while True:
chunk = await self._iter.__anext__()
await writer.write(chunk)
except StopAsyncIteration:
self._iter = None
class StreamReaderPayload(AsyncIterablePayload):
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value.iter_any(), *args, **kwargs)
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(
BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables like
# multipart.BodyPartReaderPayload a chance to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable,
order=Order.try_last)
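# Illustrative sketch (not part of the original file): PAYLOAD_REGISTRY picks a
# Payload subclass based on the type of the data passed in.
def _example_payload_lookup() -> None:
    p1 = get_payload(b'raw bytes')             # BytesPayload
    p2 = get_payload('some text')              # StringPayload
    p3 = get_payload(io.BytesIO(b'buffered'))  # BytesIOPayload
    for p in (p1, p2, p3):
        print(type(p).__name__, p.size, p.content_type)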

View file

@ -0,0 +1,74 @@
""" Payload implemenation for coroutines as data provider.
As a simple case, you can upload data from file::
@aiohttp.streamer
async def file_sender(writer, file_name=None):
with open(file_name, 'rb') as f:
chunk = f.read(2**16)
while chunk:
await writer.write(chunk)
chunk = f.read(2**16)
Then you can use `file_sender` like this:
async with session.post('http://httpbin.org/post',
data=file_sender(file_name='huge_file')) as resp:
print(await resp.text())
.. note:: The coroutine must accept `writer` as its first argument
"""
import asyncio
import warnings
from typing import Any, Awaitable, Callable, Dict, Tuple
from .abc import AbstractStreamWriter
from .payload import Payload, payload_type
__all__ = ('streamer',)
class _stream_wrapper:
def __init__(self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any]) -> None:
self.coro = asyncio.coroutine(coro)
self.args = args
self.kwargs = kwargs
async def __call__(self, writer: AbstractStreamWriter) -> None:
await self.coro(writer, *self.args, **self.kwargs)
class streamer:
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
warnings.warn("@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2)
self.coro = coro
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
return _stream_wrapper(self.coro, args, kwargs)
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
super().__init__(value(), *args, **kwargs)
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)

View file

@ -0,0 +1 @@
Marker

View file

@ -0,0 +1,337 @@
import asyncio
import contextlib
import warnings
from collections.abc import Callable
import pytest
from aiohttp.helpers import isasyncgenfunction
from aiohttp.web import Application
from .test_utils import (
BaseTestServer,
RawTestServer,
TestClient,
TestServer,
loop_context,
setup_test_loop,
teardown_test_loop,
)
from .test_utils import unused_port as _unused_port
try:
import uvloop
except ImportError: # pragma: no cover
uvloop = None
try:
import tokio
except ImportError: # pragma: no cover
tokio = None
def pytest_addoption(parser): # type: ignore
parser.addoption(
'--aiohttp-fast', action='store_true', default=False,
help='run tests faster by disabling extra checks')
parser.addoption(
'--aiohttp-loop', action='store', default='pyloop',
help='run tests with specific loop: pyloop, uvloop, tokio or all')
parser.addoption(
'--aiohttp-enable-loop-debug', action='store_true', default=False,
help='enable event loop debug mode')
def pytest_fixture_setup(fixturedef): # type: ignore
"""
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
"""
func = fixturedef.func
if isasyncgenfunction(func):
# async generator fixture
is_async_gen = True
elif asyncio.iscoroutinefunction(func):
# regular async fixture
is_async_gen = False
else:
# not an async fixture, nothing to do
return
strip_request = False
if 'request' not in fixturedef.argnames:
fixturedef.argnames += ('request',)
strip_request = True
def wrapper(*args, **kwargs): # type: ignore
request = kwargs['request']
if strip_request:
del kwargs['request']
# if neither the fixture nor the test uses the 'loop' fixture,
# 'getfixturevalue' will fail because the test is not parameterized
# (this can be removed someday if 'loop' is no longer parameterized)
if 'loop' not in request.fixturenames:
raise Exception(
"Asynchronous fixtures must depend on the 'loop' fixture or "
"be used in tests depending from it."
)
_loop = request.getfixturevalue('loop')
if is_async_gen:
# for async generators, we need to advance the generator once,
# then advance it again in a finalizer
gen = func(*args, **kwargs)
def finalizer(): # type: ignore
try:
return _loop.run_until_complete(gen.__anext__())
except StopAsyncIteration: # NOQA
pass
request.addfinalizer(finalizer)
return _loop.run_until_complete(gen.__anext__())
else:
return _loop.run_until_complete(func(*args, **kwargs))
fixturedef.func = wrapper
@pytest.fixture
def fast(request): # type: ignore
"""--fast config option"""
return request.config.getoption('--aiohttp-fast')
@pytest.fixture
def loop_debug(request): # type: ignore
"""--enable-loop-debug config option"""
return request.config.getoption('--aiohttp-enable-loop-debug')
@contextlib.contextmanager
def _runtime_warning_context(): # type: ignore
"""
Context manager which checks for RuntimeWarnings, specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.
If RuntimeWarnings occur in the context a RuntimeError is raised.
"""
with warnings.catch_warnings(record=True) as _warnings:
yield
rw = ['{w.filename}:{w.lineno}:{w.message}'.format(w=w)
for w in _warnings # type: ignore
if w.category == RuntimeWarning]
if rw:
raise RuntimeError('{} Runtime Warning{},\n{}'.format(
len(rw),
'' if len(rw) == 1 else 's',
'\n'.join(rw)
))
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False): # type: ignore
"""
Sets up and tears down a loop unless one is passed in via the loop
argument, in which case it is passed straight through.
"""
if loop:
# loop already exists, pass it straight through
yield loop
else:
# this shadows loop_context's standard behavior
loop = setup_test_loop()
yield loop
teardown_test_loop(loop, fast=fast)
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore
"""
Fix pytest collecting for coroutines.
"""
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
return list(collector._genfunctions(name, obj))
def pytest_pyfunc_call(pyfuncitem): # type: ignore
"""
Run coroutines in an event loop instead of a normal function call.
"""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get('loop', None)
with _runtime_warning_context():
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
testargs = {arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames}
_loop.run_until_complete(pyfuncitem.obj(**testargs))
return True
def pytest_generate_tests(metafunc): # type: ignore
if 'loop_factory' not in metafunc.fixturenames:
return
loops = metafunc.config.option.aiohttp_loop
avail_factories = {'pyloop': asyncio.DefaultEventLoopPolicy}
if uvloop is not None: # pragma: no cover
avail_factories['uvloop'] = uvloop.EventLoopPolicy
if tokio is not None: # pragma: no cover
avail_factories['tokio'] = tokio.EventLoopPolicy
if loops == 'all':
loops = 'pyloop,uvloop?,tokio?'
factories = {} # type: ignore
for name in loops.split(','):
required = not name.endswith('?')
name = name.strip(' ?')
if name not in avail_factories: # pragma: no cover
if required:
raise ValueError(
"Unknown loop '%s', available loops: %s" % (
name, list(factories.keys())))
else:
continue
factories[name] = avail_factories[name]
metafunc.parametrize("loop_factory",
list(factories.values()),
ids=list(factories.keys()))


@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore
    """Return an instance of the event loop."""
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore # pragma: no cover
    warnings.warn("Deprecated, use aiohttp_unused_port fixture instead",
                  DeprecationWarning)
    return aiohttp_unused_port


@pytest.fixture
def aiohttp_unused_port():  # type: ignore
    """Return a port that is unused on the current host."""
    return _unused_port


@pytest.fixture
def aiohttp_server(loop):  # type: ignore
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    servers = []

    async def go(app, *, port=None, **kwargs):  # type: ignore
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize():  # type: ignore
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
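A sketch of how a test might use this fixture together with aiohttp_client (the handler and test names are illustrative):

from aiohttp import web


async def ping(request):
    return web.Response(text='pong')


async def test_server_fixture(aiohttp_server, aiohttp_client):
    app = web.Application()
    app.router.add_get('/ping', ping)
    server = await aiohttp_server(app)     # a started TestServer, closed on teardown
    client = await aiohttp_client(server)  # a TestClient wired to that server
    resp = await client.get('/ping')
    assert resp.status == 200
    assert await resp.text() == 'pong'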


@pytest.fixture
def test_server(aiohttp_server):  # type: ignore # pragma: no cover
    warnings.warn("Deprecated, use aiohttp_server fixture instead",
                  DeprecationWarning)
    return aiohttp_server


@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    servers = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize():  # type: ignore
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
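A sketch of exercising the raw server fixture, where a single low-level handler answers every path (names are illustrative):

from aiohttp import web


async def low_level_handler(request):
    return web.Response(text='raw ok')


async def test_raw_server_fixture(aiohttp_raw_server, aiohttp_client):
    server = await aiohttp_raw_server(low_level_handler)
    client = await aiohttp_client(server)
    resp = await client.get('/any/path')   # no routing: one handler for everything
    assert await resp.text() == 'raw ok'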


@pytest.fixture
def raw_test_server(aiohttp_raw_server):  # type: ignore # pragma: no cover
    warnings.warn("Deprecated, use aiohttp_raw_server fixture instead",
                  DeprecationWarning)
    return aiohttp_raw_server


@pytest.fixture
def aiohttp_client(loop):  # type: ignore
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(__param, *args, server_kwargs=None, **kwargs):  # type: ignore
        if (isinstance(__param, Callable) and  # type: ignore
                not isinstance(__param, (Application, BaseTestServer))):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize():  # type: ignore
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
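The common use is to pass an Application directly; a sketch of the canonical test shape (the handler and test names are illustrative):

from aiohttp import web


async def hello(request):
    return web.Response(text='Hello, world')


async def test_hello(aiohttp_client):
    app = web.Application()
    app.router.add_get('/', hello)
    client = await aiohttp_client(app)   # builds a TestServer + TestClient pair
    resp = await client.get('/')
    assert resp.status == 200
    assert 'Hello' in await resp.text()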


@pytest.fixture
def test_client(aiohttp_client):  # type: ignore # pragma: no cover
    warnings.warn("Deprecated, use aiohttp_client fixture instead",
                  DeprecationWarning)
    return aiohttp_client

View file

@ -0,0 +1,112 @@
import asyncio
import socket
from typing import Any, Dict, List, Optional

from .abc import AbstractResolver
from .helpers import get_running_loop

__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver')

try:
    import aiodns

    # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError:  # pragma: no cover
    aiodns = None

aiodns_default = False


class ThreadedResolver(AbstractResolver):
    """Use an Executor for synchronous getaddrinfo() calls.

    The executor defaults to concurrent.futures.ThreadPoolExecutor.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
        self._loop = get_running_loop(loop)

    async def resolve(self, host: str, port: int=0,
                      family: int=socket.AF_INET) -> List[Dict[str, Any]]:
        infos = await self._loop.getaddrinfo(
            host, port, type=socket.SOCK_STREAM, family=family)

        hosts = []
        for family, _, proto, _, address in infos:
            hosts.append(
                {'hostname': host,
                 'host': address[0], 'port': address[1],
                 'family': family, 'proto': proto,
                 'flags': socket.AI_NUMERICHOST})

        return hosts

    async def close(self) -> None:
        pass
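A sketch of calling the threaded resolver directly; the host name is arbitrary and the call needs working DNS:

import asyncio

from aiohttp.resolver import ThreadedResolver


async def main():
    resolver = ThreadedResolver()
    hosts = await resolver.resolve('example.org', 80)
    for entry in hosts:
        # each entry carries: hostname, host, port, family, proto, flags
        print(entry['host'], entry['port'])


loop = asyncio.get_event_loop()
loop.run_until_complete(main())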


class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None,
                 *args: Any, **kwargs: Any) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._loop = get_running_loop(loop)
        self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)

        if not hasattr(self._resolver, 'gethostbyname'):
            # aiodns 1.1 is not available, fall back to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(self, host: str, port: int=0,
                      family: int=socket.AF_INET) -> List[Dict[str, Any]]:
        try:
            resp = await self._resolver.gethostbyname(host, family)
        except aiodns.error.DNSError as exc:
            # DNSError normally carries (errno, errstr); guard the index so a
            # single-argument error does not raise IndexError here
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc
        hosts = []
        for address in resp.addresses:
            hosts.append(
                {'hostname': host,
                 'host': address, 'port': port,
                 'family': family, 'proto': 0,
                 'flags': socket.AI_NUMERICHOST})

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def _resolve_with_query(
            self, host: str, port: int=0,
            family: int=socket.AF_INET) -> List[Dict[str, Any]]:
        if family == socket.AF_INET6:
            qtype = 'AAAA'
        else:
            qtype = 'A'

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # same guard as in resolve(): only trust args[1] when it exists
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {'hostname': host,
                 'host': rr.host, 'port': port,
                 'family': family, 'proto': 0,
                 'flags': socket.AI_NUMERICHOST})

        if not hosts:
            raise OSError("DNS lookup failed")

        return hosts

    async def close(self) -> None:
        return self._resolver.cancel()


DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver
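A sketch of wiring a resolver into a client connector; AsyncResolver needs the aiodns package, and the nameserver addresses shown are just examples:

import asyncio

import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch_status(url):
    resolver = AsyncResolver(nameservers=['8.8.8.8', '8.8.4.4'])
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status


loop = asyncio.get_event_loop()
print(loop.run_until_complete(fetch_status('http://example.org')))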

View file

@ -0,0 +1,34 @@
from aiohttp.frozenlist import FrozenList

__all__ = ('Signal',)


class Signal(FrozenList):
    """Coroutine-based signal implementation.

    To connect a callback to a signal, use any list method.

    Signals are fired using the send() coroutine, which takes named
    arguments.
    """

    __slots__ = ('_owner',)

    def __init__(self, owner):
        super().__init__()
        self._owner = owner

    def __repr__(self):
        return '<Signal owner={}, frozen={}, {!r}>'.format(self._owner,
                                                           self.frozen,
                                                           list(self))

    async def send(self, *args, **kwargs):
        """
        Sends data to all registered receivers.
        """
        if not self.frozen:
            raise RuntimeError("Cannot send non-frozen signal.")

        for receiver in self:
            await receiver(*args, **kwargs)  # type: ignore
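A sketch of the intended usage: receivers are attached with list methods, the signal is frozen, and send() awaits each receiver in order (the owner object and receiver are illustrative):

import asyncio

from aiohttp.signals import Signal


async def on_startup(message):
    print('received:', message)


sig = Signal(owner=object())   # the owner is only used in repr()
sig.append(on_startup)         # connect a receiver with any list method
sig.freeze()                   # send() refuses to run on a non-frozen signal

loop = asyncio.get_event_loop()
loop.run_until_complete(sig.send(message='hello'))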

View file

@ -0,0 +1,18 @@
from typing import Any, Generic, TypeVar

from aiohttp.frozenlist import FrozenList

__all__ = ('Signal',)

_T = TypeVar('_T')


class Signal(FrozenList[_T], Generic[_T]):

    def __init__(self, owner: Any) -> None: ...

    def __repr__(self) -> str: ...

    async def send(self, *args: Any, **kwargs: Any) -> None: ...
