Added linux venv

Quentin 2021-01-20 16:03:44 -06:00
parent 80c3fdf5be
commit a124119353
877 changed files with 209229 additions and 0 deletions

76
venv-linux/bin/activate Normal file

@@ -0,0 +1,76 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi
    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi
    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi
    unset VIRTUAL_ENV
    if [ ! "$1" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/home/officereso/Discord_Bot/venv-linux"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
if [ "x(venv-linux) " != x ] ; then
PS1="(venv-linux) ${PS1:-}"
else
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
else
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
fi
fi
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r
fi

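The file above appears to be the stock activate script that python3 -m venv generates: sourcing it exports VIRTUAL_ENV, prepends venv-linux/bin to PATH, rewrites the prompt, and defines deactivate to undo all of that. A minimal sketch (standard library only, nothing here is taken from the committed files) for checking from Python whether the running interpreter actually belongs to a virtual environment:

    import sys

    def in_virtualenv() -> bool:
        # Inside a venv, sys.prefix points at the environment directory,
        # while sys.base_prefix still points at the system installation
        # the environment was created from.
        return sys.prefix != sys.base_prefix

    if __name__ == '__main__':
        print('executable :', sys.executable)
        print('prefix     :', sys.prefix)
        print('base prefix:', sys.base_prefix)
        print('in venv    :', in_virtualenv())

Run with venv-linux/bin/python, the two prefixes should differ; run with the system python3, they should match.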

@@ -0,0 +1,37 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/home/officereso/Discord_Bot/venv-linux"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
if ("venv-linux" != "") then
set env_name = "venv-linux"
else
if (`basename "VIRTUAL_ENV"` == "__") then
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
else
set env_name = `basename "$VIRTUAL_ENV"`
endif
endif
set prompt = "[$env_name] $prompt"
unset env_name
endif
alias pydoc python -m pydoc
rehash


@@ -0,0 +1,75 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly
function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end
    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end
    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end
# unset irrelevant variables
deactivate nondestructive
set -gx VIRTUAL_ENV "/home/officereso/Discord_Bot/venv-linux"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# save the current fish_prompt function as the function _old_fish_prompt
functions -c fish_prompt _old_fish_prompt
# with the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command
set -l old_status $status
# Prompt override?
if test -n "(venv-linux) "
printf "%s%s" "(venv-linux) " (set_color normal)
else
# ...Otherwise, prepend env
set -l _checkbase (basename "$VIRTUAL_ENV")
if test $_checkbase = "__"
# special case for Aspen magic directories
# see http://www.zetadev.com/software/aspen/
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
else
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
end
end
# Restore the return status of the previous command.
echo "exit $old_status" | .
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

11
venv-linux/bin/chardetect Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

11
venv-linux/bin/easy_install Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

11
venv-linux/bin/easy_install-3.6 Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

11
venv-linux/bin/pip Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

11
venv-linux/bin/pip3 Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

11
venv-linux/bin/pip3.6 Executable file

@@ -0,0 +1,11 @@
#!/home/officereso/Discord_Bot/venv-linux/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
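The bin/ wrappers above (chardetect, easy_install and the pip scripts) all follow the same console-script pattern: a shebang pinned to the environment's interpreter, a cleanup of argv[0], then a call into the package's main(). A hedged sketch of how a package normally declares such a command (the distribution and module names below are made up for illustration, not taken from this commit):

    from setuptools import setup

    setup(
        name='example-tool',            # hypothetical distribution name
        version='0.1',
        py_modules=['example_tool'],    # hypothetical module exposing main()
        entry_points={
            'console_scripts': [
                # "<command> = <module>:<callable>"; on install, pip/setuptools
                # generate a bin/ wrapper much like the files above, with the
                # environment's python in the shebang line.
                'example-tool = example_tool:main',
            ],
        },
    )

Note that the pip wrappers above use "from pip import main", the pip 9-era entry point; upgrading pip inside this venv would likely rewrite these scripts, since newer pip releases moved that function.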

1
venv-linux/bin/python Symbolic link

@@ -0,0 +1 @@
python3

1
venv-linux/bin/python3 Symbolic link

@@ -0,0 +1 @@
/usr/bin/python3
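The python entry is a symlink to python3, which in turn points at /usr/bin/python3, so the environment reuses the system interpreter rather than bundling its own copy. A small sketch for resolving that chain (the relative path assumes the layout recorded in this commit):

    from pathlib import Path

    venv_python = Path('venv-linux/bin/python')
    # resolve() follows python -> python3 -> /usr/bin/python3 as recorded here;
    # on another machine the final target may differ or be missing.
    print(venv_python.resolve())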


@@ -0,0 +1,19 @@
Copyright (c) 2010, 2013 PyMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@@ -0,0 +1,189 @@
Metadata-Version: 2.1
Name: PyMySQL
Version: 0.10.1
Summary: Pure Python MySQL Driver
Home-page: https://github.com/PyMySQL/PyMySQL/
Author: yutaka.matsubara
Author-email: yutaka.matsubara@gmail.com
Maintainer: Inada Naoki
Maintainer-email: songofacandy@gmail.com
License: "MIT"
Project-URL: Documentation, https://pymysql.readthedocs.io/
Keywords: MySQL
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Database
Provides-Extra: ed25519
Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
Provides-Extra: rsa
Requires-Dist: cryptography ; extra == 'rsa'
.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
:target: https://pymysql.readthedocs.io/
:alt: Documentation Status
.. image:: https://badge.fury.io/py/PyMySQL.svg
:target: https://badge.fury.io/py/PyMySQL
.. image:: https://travis-ci.org/PyMySQL/PyMySQL.svg?branch=master
:target: https://travis-ci.org/PyMySQL/PyMySQL
.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
.. image:: https://img.shields.io/badge/license-MIT-blue.svg
:target: https://github.com/PyMySQL/PyMySQL/blob/master/LICENSE
PyMySQL
=======
.. contents:: Table of Contents
:local:
This package contains a pure-Python MySQL client library, based on `PEP 249`_.
Most public APIs are compatible with mysqlclient and MySQLdb.
NOTE: PyMySQL doesn't support low level APIs `_mysql` provides like `data_seek`,
`store_result`, and `use_result`. You should use high level APIs defined in `PEP 249`_.
But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover
their usecase.
.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
Requirements
-------------
* Python -- one of the following:
- CPython_ : 2.7 and >= 3.5
- PyPy_ : Latest version
* MySQL Server -- one of the following:
- MySQL_ >= 5.5
- MariaDB_ >= 5.5
.. _CPython: https://www.python.org/
.. _PyPy: https://pypy.org/
.. _MySQL: https://www.mysql.com/
.. _MariaDB: https://mariadb.org/
Installation
------------
Package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.
You can install it with pip::
$ python3 -m pip install PyMySQL
To use "sha256_password" or "caching_sha2_password" for authenticate,
you need to install additional dependency::
$ python3 -m pip install PyMySQL[rsa]
To use MariaDB's "ed25519" authentication method, you need to install
an additional dependency::
$ python3 -m pip install PyMySQL[ed25519]
Documentation
-------------
Documentation is available online: https://pymysql.readthedocs.io/
For support, please refer to the `StackOverflow
<https://stackoverflow.com/questions/tagged/pymysql>`_.
Example
-------
The following examples make use of a simple table
.. code:: sql
    CREATE TABLE `users` (
        `id` int(11) NOT NULL AUTO_INCREMENT,
        `email` varchar(255) COLLATE utf8_bin NOT NULL,
        `password` varchar(255) COLLATE utf8_bin NOT NULL,
        PRIMARY KEY (`id`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin
    AUTO_INCREMENT=1 ;
.. code:: python
    import pymysql.cursors
    # Connect to the database
    connection = pymysql.connect(host='localhost',
                                 user='user',
                                 password='passwd',
                                 db='db',
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.DictCursor)
    try:
        with connection.cursor() as cursor:
            # Create a new record
            sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
            cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
        # connection is not autocommit by default. So you must commit to save
        # your changes.
        connection.commit()
        with connection.cursor() as cursor:
            # Read a single record
            sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
            cursor.execute(sql, ('webmaster@python.org',))
            result = cursor.fetchone()
            print(result)
    finally:
        connection.close()
This example will print:
.. code:: python
{'password': 'very-secret', 'id': 1}
Resources
---------
* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
* MySQL Reference Manuals: https://dev.mysql.com/doc/
* MySQL client/server protocol:
https://dev.mysql.com/doc/internals/en/client-server-protocol.html
* "Connector" channel in MySQL Community Slack:
https://lefred.be/mysql-community-on-slack/
* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
License
-------
PyMySQL is released under the MIT License. See LICENSE for more information.


@@ -0,0 +1,49 @@
pymysql/__init__.py,sha256=KDHcmnEoEDMmRPNO5JFcxb7lsypDmwGn5Td-f-X6xDY,4733
pymysql/_auth.py,sha256=pEeHBpQ15h2wfj6k7np6LVHVz34whEXSs5KrqeYtDGw,9564
pymysql/_compat.py,sha256=DSxMV2ib-rhIuQIKiXX44yds_0bN2M_RddfYQiSdB6U,481
pymysql/_socketio.py,sha256=smsw4wudNM4CKl85uis8QHfjDhz2iXQRvl8QV4TmB1w,4049
pymysql/charset.py,sha256=zaaRbEQrFiE0iCd3AB52WJY9VqVxQcp8sYcoPDlPdWI,10308
pymysql/connections.py,sha256=xR0gWxvQ6IxBcFhY9JPmYRCcvs6xSnRKUq-DZ6MpfNY,49010
pymysql/converters.py,sha256=kUT2KQdkqNTuSxzURVnQKS1ZcatoFTUfYe5b5QSJuRI,11055
pymysql/cursors.py,sha256=eiP_oTDi1MM5EYLHoecwbv5BXWJ1qEjfK8Uy3SjGEcs,16250
pymysql/err.py,sha256=Vdrt2rVaSePVlB_uy0JNoeN6zYBt0_mM1UFDighLgNM,3734
pymysql/optionfile.py,sha256=4yW8A7aAR2Aild7ibLOCzIlTCcYd90PtR8LRGJSZs8o,658
pymysql/protocol.py,sha256=9hAfVK-g4i53gHMoGj9QrPApywMYVM8oxGAuKb_-PXo,12071
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
pymysql/util.py,sha256=jKPts8cOMIXDndjsV3783VW-iq9uMxETWqfHP6Bd-Zo,180
pymysql/constants/CLIENT.py,sha256=cPMxnQQbBG6xqaEDwqzggTfWIuJQ1Oy7HrIgw_vgpo4,853
pymysql/constants/COMMAND.py,sha256=ypGdEUmi8m9cdBZ3rDU6mb7bsIyu9ldCDvc4pNF7V70,680
pymysql/constants/CR.py,sha256=5ojVkbisyw7Qo_cTNpnHYvV6xHRZXK39Qqv8tjGbIbg,2228
pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
pymysql/constants/FIELD_TYPE.py,sha256=yHZLSyQewMxTDx4PLrI1H_iwH2FnsrgBZFa56UG2HiQ,372
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
pymysql/constants/SERVER_STATUS.py,sha256=KogVCOrV-S5aAFwyVKeKgua13nwdt1WFyHagjCZbcpM,334
pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyMySQL-0.10.1.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
PyMySQL-0.10.1.dist-info/METADATA,sha256=SP0KPSfmgNJ2ujhGRrRRiWOodzv62BfYnbY1OXX3DTI,5481
PyMySQL-0.10.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
PyMySQL-0.10.1.dist-info/pbr.json,sha256=Lqvh8-9N7qS6SLUlEJ5GDLWioQcvR9n1WWjMEfJ5mv8,47
PyMySQL-0.10.1.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
PyMySQL-0.10.1.dist-info/RECORD,,
PyMySQL-0.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pymysql/__pycache__/connections.cpython-36.pyc,,
pymysql/__pycache__/times.cpython-36.pyc,,
pymysql/__pycache__/_socketio.cpython-36.pyc,,
pymysql/__pycache__/__init__.cpython-36.pyc,,
pymysql/__pycache__/_auth.cpython-36.pyc,,
pymysql/__pycache__/converters.cpython-36.pyc,,
pymysql/__pycache__/_compat.cpython-36.pyc,,
pymysql/__pycache__/optionfile.cpython-36.pyc,,
pymysql/__pycache__/cursors.cpython-36.pyc,,
pymysql/__pycache__/err.cpython-36.pyc,,
pymysql/__pycache__/util.cpython-36.pyc,,
pymysql/__pycache__/charset.cpython-36.pyc,,
pymysql/__pycache__/protocol.cpython-36.pyc,,
pymysql/constants/__pycache__/COMMAND.cpython-36.pyc,,
pymysql/constants/__pycache__/__init__.cpython-36.pyc,,
pymysql/constants/__pycache__/CR.cpython-36.pyc,,
pymysql/constants/__pycache__/CLIENT.cpython-36.pyc,,
pymysql/constants/__pycache__/SERVER_STATUS.cpython-36.pyc,,
pymysql/constants/__pycache__/FLAG.cpython-36.pyc,,
pymysql/constants/__pycache__/ER.cpython-36.pyc,,
pymysql/constants/__pycache__/FIELD_TYPE.cpython-36.pyc,,


@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any


@@ -0,0 +1 @@
{"is_release": false, "git_version": "08bac52"}


@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,661 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.6.3
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5.3
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<4.0,>=2.0)
Requires-Dist: multidict (<5.0,>=4.5)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<1.6.0,>=1.0)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'
==================================
Async http client/server framework
==================================
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
:height: 64px
:width: 64px
:alt: aiohttp logo
|
.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
:target: https://travis-ci.com/aio-libs/aiohttp
:align: right
:alt: Travis status for master branch
.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
:target: https://ci.appveyor.com/project/aio-libs/aiohttp
:align: right
:alt: AppVeyor status for master branch
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
:target: https://codecov.io/gh/aio-libs/aiohttp
:alt: codecov.io status for master branch
.. image:: https://badge.fury.io/py/aiohttp.svg
:target: https://pypi.org/project/aiohttp
:alt: Latest PyPI package version
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
:target: https://docs.aiohttp.org/
:alt: Latest Read The Docs
.. image:: https://badges.gitter.im/Join%20Chat.svg
:target: https://gitter.im/aio-libs/Lobby
:alt: Chat on Gitter
Key Features
============
- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
- Provides Web-server with middlewares and pluggable routing.
Getting started
===============
Client
------
To get something from the web:
.. code-block:: python
    import aiohttp
    import asyncio
    async def fetch(session, url):
        async with session.get(url) as response:
            return await response.text()
    async def main():
        async with aiohttp.ClientSession() as session:
            html = await fetch(session, 'http://python.org')
            print(html)
    if __name__ == '__main__':
        loop = asyncio.get_event_loop()
        loop.run_until_complete(main())
Server
------
An example using a simple server:
.. code-block:: python
    # examples/server_simple.py
    from aiohttp import web
    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)
    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break
        return ws
    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])
    if __name__ == '__main__':
        web.run_app(app)
Documentation
=============
https://aiohttp.readthedocs.io/
Demos
=====
https://github.com/aio-libs/aiohttp-demos
External links
==============
* `Third party libraries
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
Feel free to post your questions and ideas here.
*gitter chat* https://gitter.im/aio-libs/Lobby
We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add *aiohttp* tag to your question there.
Requirements
============
- Python >= 3.5.3
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_
Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for sake of speed).
.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
``aiohttp`` is offered under the Apache 2 license.
Keepsafe
========
The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.
Source code
===========
The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp
Benchmarks
==========
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
=========
Changelog
=========
..
You should *NOT* be adding new change log entries to this file, this
file is managed by towncrier. You *may* edit previous change logs to
fix problems like typo corrections or such.
To add a new change log entry, please see
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
we named the news folder "changes".
WARNING: Don't drop the next directive!
.. towncrier release notes start
3.6.3 (2020-10-12)
==================
Bugfixes
--------
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
release.
3.6.2 (2019-10-09)
==================
Features
--------
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
Bugfixes
--------
- Reset the ``sock_read`` timeout each time data is received for a
``aiohttp.ClientResponse``. `#3808
<https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fix handling of expired cookies so they are not stored in CookieJar.
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
- Fix misleading message in the string representation of ``ClientConnectorError``;
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
<https://github.com/aio-libs/aiohttp/issues/4097>`_
- Don't clobber HTTP status when using FileResponse.
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
Improved Documentation
----------------------
- Added minimal required logging configuration to logging documentation.
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
- Update docs to reflect proxy support.
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
- Fix typo in code example in testing docs.
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.6.1 (2019-09-19)
==================
Features
--------
- Compatibility with Python 3.8.
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
Bugfixes
--------
- correct some exception string format
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
unavailable because the runtime is built against
an outdated OpenSSL.
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
- Update multidict requirement to >= 4.5
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
Improved Documentation
----------------------
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
----
3.6.0 (2019-09-06)
==================
Features
--------
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
Proactor event loop to work. `#3629
<https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
compatible with more http proxy servers. `#3798
<https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use 308 redirect instead of 301.
This behavior should prevent clients from being unable to use PUT/POST
methods on endpoints that are redirected because of a trailing slash.
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
task with unhandled exception when the server is used in conjunction with
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
by mypy in files that references multipart.py.
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Cleanup per-chunk data in generic data read. Memory leak fixed.
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
transport" exception when clients disconnect uncleanly.
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
``asyncio.ClientSession.request`` according to the docs `#3852
<https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
Improved Documentation
----------------------
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- use ``if __name__ == '__main__':`` in server examples.
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
----
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
partial content only in case of compressed content
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
``access_log=True`` and the event loop being in debug mode. `#3504
<https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
with files asynchronously. I/O based payloads from ``payload.py`` uses a
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
<https://github.com/aio-libs/aiohttp/issues/3313>`_
- Internal Server Errors in plain text if the browser does not support HTML.
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts headers on write. Refactor the way how
``Payload.headers`` are handled. Payload instances now always have headers and
Content-Type defined. Fix Payload Content-Disposition header reset after initial
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
It allows to use Python3.7 context vars smoothly.
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
avoid a deprecation warning. `#3480
<https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
to run in a background executor to avoid blocking the main thread and potentially
triggering health check failures. (`#3205
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add default logging handler to web.run_app. If the ``Application.debug`` flag is set
and the default logger ``aiohttp.access`` is used, access logs will now be output
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
different HTTP method for WebSocket connection establishment. For example, ``Docker
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
Bugfixes
--------
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
collections module will not be supported anymore. (`#3273
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by ``normalize_path_middleware``. (`#3278
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await
connector.close()`` instead. (`#3417
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
Misc
----
- #3341, #3351


@@ -0,0 +1,124 @@
aiohttp/client_proto.py,sha256=ETzD290mCPX_gUP1ioHO3BNcMkbBjw3Ho2Yb_TyTPeU,8037
aiohttp/cookiejar.py,sha256=yWXa7YUNED17-De0ozYf5xoaortyFL4VURPqkuWIUec,11881
aiohttp/_find_header.c,sha256=lWc5w3UZiVd3ni60DuFDSSPzsaQUhAQcERDGBOqeML8,189932
aiohttp/_http_writer.pyx,sha256=vQgnMbmULTuhPvZ5cuZQgBhrqHoyE1caNnKeDtYZe5g,4201
aiohttp/web_log.py,sha256=ZBiwi0HPOiTuOJNHpolBzUoF1IdRJpRgcO2SoO_iKtI,8255
aiohttp/client_ws.py,sha256=AQlj-peBA0mGyra1t38sWlfV28MEM0SAATRXp1TsF9I,10694
aiohttp/_helpers.pyi,sha256=mJRb5YdG8DxYbPfVddGRGmi93qqaJM30L1qFpgSKQuA,204
aiohttp/tracing.py,sha256=HKVbmVEVIaSYIdPNzk93Uqk-Z7CIMrPDoTdPpuGUzdA,13353
aiohttp/worker.py,sha256=hekSLWLEJVrHrIrZ3dQga7Jzgtx_Cf3ZW7Zfd1J1G3A,8178
aiohttp/web_protocol.py,sha256=lWVV7t_VBWXAC0K4hI4MgCP_VoHUv7IZEJyQJmG4DYg,22192
aiohttp/_cparser.pxd,sha256=tgw30SL6kQSczzGMlMhx2Cuhf_O8P8ZPimVCb85xILc,3959
aiohttp/formdata.py,sha256=VZCo9kmDb50lQUcRMDfAH3d5lnRxBq_AX38ge8vFI00,5807
aiohttp/streams.py,sha256=4ZEJXKDDfQu2sEzETcDaggbSee7t5hy2dhD9ipBVAhg,20459
aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68
aiohttp/frozenlist.pyi,sha256=XmrOTtt3PH0evccYlSsFAkgWxqHizm9IyvRxvqSt2MA,1449
aiohttp/log.py,sha256=kOWU6EcyBQESISm27vc8dVEz_h9zxozLa5WCya1RzhQ,325
aiohttp/__init__.py,sha256=mLgcCI1IbtpejkbYB11E4_cdnTGXDucIVNlt-6Itres,8201
aiohttp/abc.py,sha256=lsf2bz-9KtqLhtI-e-tmgp3ynziMypYyEHvwOnFg7lQ,5392
aiohttp/_http_writer.cpython-36m-x86_64-linux-gnu.so,sha256=zm2He8lS3pa9tXHdRn34DQJa9L24nhsNtRbqHezQ2m4,200464
aiohttp/client_reqrep.py,sha256=45ITtqQw2yv_xKZBBFrjjqjjH2pAvNsMpvzJg3_gXDc,36012
aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049
aiohttp/resolver.py,sha256=pRF91jOjTNuCll5TMRjTe1OxnGZK4wjAggYLgvzXkGQ,3626
aiohttp/payload_streamer.py,sha256=ZNWaWwAxOIricwfjH4-YrkCqehowVizM6fJ_JVDR480,2103
aiohttp/client_exceptions.py,sha256=50ECpb_fyJ0GTop_y-wyU85acaX1g3AOTFtLPF3MXJ8,8494
aiohttp/web_ws.py,sha256=SUUDB0aQNe2nsv_fYjmkBwJJVjarI05JBzoReS9gCVs,16958
aiohttp/http.py,sha256=YzihfNgKUAnduXjLdyXfgQFWkJ4GlQpEdGgn8UWTmzs,2133
aiohttp/helpers.py,sha256=UXx60KcqNxiKnYpb3ruB4SuLA8n8gi_Qpd5cDzxVpUk,22898
aiohttp/web_app.py,sha256=0Qd7E6EoKMIG6kP8YjmZCO2JDvVK5GMQX5c-Om38kaQ,17265
aiohttp/_websocket.c,sha256=mhfGw4_fo7qzXsiAKDFclf5VBMwAY2-ccKiJV9uwSPk,136796
aiohttp/web_fileresponse.py,sha256=VokjofoX_cm00PvOUnQlGQJPrHdKbE1T1Yy6ZgsxYWI,12772
aiohttp/http_writer.py,sha256=XhGCqy_lzdLyxIzjQ_ufPFfJKTTWx1sb6YZWvrOFUPA,5239
aiohttp/_websocket.pyx,sha256=tJfygcVwKF_Xb6Pg48a6t50YO2xY4Rg0Wj7LcJJMi-U,1559
aiohttp/multipart.py,sha256=XWVdsgOQbBPl9QID1mvafkb6v0i8IaNUrjykk36xTFc,32781
aiohttp/test_utils.py,sha256=hK19Rw-SG7hC1-r60D2TMfkCPDINEURq-sa6NCSNvoI,20860
aiohttp/web_request.py,sha256=AwcGfvmCYevhzKedtDdYKPg999cX91bTIt9oeVyGqdY,25772
aiohttp/web.py,sha256=-Eh9m4aXyRXeaBkl7jvv_mlTH3d6IXsaS6llg9Yuido,19438
aiohttp/base_protocol.py,sha256=k11bt9JrBkKJcEDqTtES6vIguce0haZ9jVhcMDt9LYs,2663
aiohttp/web_exceptions.py,sha256=e8BRGZO-gJuDv-ZWmDt3Fq84iq_xF4fuKJzzWX7go6Y,10106
aiohttp/http_parser.py,sha256=v9csKsBv-rmOir1ikRBcDJDAaPMsFen1HoP8_Viz6xE,27912
aiohttp/web_urldispatcher.py,sha256=z4TIMTja2r8TDV46As7_acBSomTlook6TlAldRWB0Lc,38908
aiohttp/web_routedef.py,sha256=w_ATxAAzmh28uIvbCCo0guotfEDqYSb1gG0ZJ2QbyEM,6099
aiohttp/_helpers.c,sha256=RXISYNjuRMMK62Kowg2lfSY4vRfuoGciuNoau4-sOUk,208656
aiohttp/web_server.py,sha256=527MjryEIqWArFHMJlEABg3TcZgYtyJIFHY19Yvf3AI,2165
aiohttp/_headers.pxi,sha256=XgJL5FQRwL4uZQfegYShPclsErUlvG_xuMHs7dp_2-o,2027
aiohttp/http_websocket.py,sha256=IJLApqN53JsuQK-vU5rHgSzNjeeJhecwiS7AwLwWGKk,24741
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
aiohttp/_frozenlist.c,sha256=TJm7lrkQmqxgv34LNKxVSDGYoe_kzCDwe7UJhA9kWEw,288942
aiohttp/_http_writer.c,sha256=k2DdmpGmsAq-G_evurYCgq8QGsawmOVRS5AtCJNR3nM,211682
aiohttp/_http_parser.pyx,sha256=oysZhQ4DMVW16NtPL-G0ZZySPa2PfG2C3z62Yy02DnM,28731
aiohttp/tcp_helpers.py,sha256=1WVYM2C-HZQpgcksTyadRsl2_WeuXh_ECUxCcwji5d8,1631
aiohttp/signals.py,sha256=_ge2XQXBDWHoyCI4E-nXC-sOEJGVrJm0zYGHH0E5woQ,948
aiohttp/signals.pyi,sha256=bwT4vJeDdymZ8_gdpmfV4RhIolbI-XwZLCnpaq0xwSU,324
aiohttp/hdrs.py,sha256=iaXnHXOR_Dx0rvVkvmIZhc-7Egf2ByuSDI9tqskS0kQ,3449
aiohttp/web_response.py,sha256=KRtJz-oBNuJBHJCXy20Q6DHihQWRN-9ux11Vsl_POAk,25485
aiohttp/_helpers.cpython-36m-x86_64-linux-gnu.so,sha256=9zR1yTa2Qr7jdP5PLCA09yKvPp2tJDznucUXMey1w6A,217557
aiohttp/connector.py,sha256=UhBBB1f4ujLLts3TEGTVEoqrFsOOHWJegdyKoy_EhoM,42479
aiohttp/client.py,sha256=_-g2gD1Hm8XLRJme6ByaBRikIHbnQd_l65cOg_UHHUE,43979
aiohttp/frozenlist.py,sha256=I4zR368wRHXp402Z3f5lhd5i48b6A66MhHncW1JGkb4,1781
aiohttp/_frozenlist.pyx,sha256=BD8LcERExsWdo4qzuuQ84f-L_pHVzkUQO0lEAOe3Fog,2605
aiohttp/_http_parser.c,sha256=7z-M-ctMyF-iV5wv-gYLujX0a7jSsfKpIRcj-eQrVAo,997683
aiohttp/_frozenlist.cpython-36m-x86_64-linux-gnu.so,sha256=cIpr-ejc-rC_dyesxDD59serSpcpfeFXcOZXeW2Nwwc,313395
aiohttp/_http_parser.cpython-36m-x86_64-linux-gnu.so,sha256=_1g2iBpaOsqUxha4OehJdic8OPwmTUboCYBlBiObAuQ,1586366
aiohttp/_websocket.cpython-36m-x86_64-linux-gnu.so,sha256=wmFIAXiPUzcNloVu92BE9HTdthIyZux3TKd-KuX41u0,102842
aiohttp/http_exceptions.py,sha256=V8sRLSfrsWzKGDuK9TYSx9LWDuJO6a-gOW1BFSDBx5M,2663
aiohttp/web_middlewares.py,sha256=p45cYhF5l_HO8AAP4pic7dMpYg7qu5HgidFPprluXO0,4190
aiohttp/typedefs.py,sha256=8UijScgRRbeOOxLtkK9aqMNIFSOoqo65V0-KA40HCXo,1324
aiohttp/_find_header.h,sha256=5oOgQ85nF6V7rpU8NhyE5vyGkTo1Cgf1GIYrtxSTzQI,170
aiohttp/pytest_plugin.py,sha256=LOyTRuGLHL0XPAh5ByVaYeCReqRAwZfboXoCwtLxjVY,10835
aiohttp/payload.py,sha256=QdGUaBQGGhKOARbsRtVeq-EdlL4C__0e8Sm5DSbPPSU,14027
aiohttp/web_runner.py,sha256=6PqQBGdC8BanrxpYRlMn_33MpsDqstjOIGDftECb2uc,11142
aiohttp/locks.py,sha256=l-cW8wUbIkHaovghT7gpY8Yp5Vlo-u2G7_CR5xQqEQ8,1234
aiohttp-3.6.3.dist-info/WHEEL,sha256=ZCgRoodM6cLv8Z9Z-kuTK9QreeQu8GHOdQFY_br7fEU,109
aiohttp-3.6.3.dist-info/METADATA,sha256=UmObIpkmeVqZFB1rwDUEbIzmKMrVaEaEfDZulA48d_g,24570
aiohttp-3.6.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp-3.6.3.dist-info/RECORD,,
aiohttp-3.6.3.dist-info/LICENSE.txt,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
aiohttp-3.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
aiohttp/__pycache__/connector.cpython-36.pyc,,
aiohttp/__pycache__/tracing.cpython-36.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
aiohttp/__pycache__/formdata.cpython-36.pyc,,
aiohttp/__pycache__/payload.cpython-36.pyc,,
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
aiohttp/__pycache__/streams.cpython-36.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/locks.cpython-36.pyc,,
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
aiohttp/__pycache__/__init__.cpython-36.pyc,,
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
aiohttp/__pycache__/web_request.cpython-36.pyc,,
aiohttp/__pycache__/http.cpython-36.pyc,,
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
aiohttp/__pycache__/multipart.cpython-36.pyc,,
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
aiohttp/__pycache__/web_response.cpython-36.pyc,,
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
aiohttp/__pycache__/web.cpython-36.pyc,,
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/signals.cpython-36.pyc,,
aiohttp/__pycache__/web_app.cpython-36.pyc,,
aiohttp/__pycache__/resolver.cpython-36.pyc,,
aiohttp/__pycache__/web_log.cpython-36.pyc,,
aiohttp/__pycache__/client.cpython-36.pyc,,
aiohttp/__pycache__/helpers.cpython-36.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
aiohttp/__pycache__/worker.cpython-36.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/log.cpython-36.pyc,,
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
aiohttp/__pycache__/web_server.cpython-36.pyc,,
aiohttp/__pycache__/abc.cpython-36.pyc,,
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
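
Each row above follows the wheel RECORD format, `path,sha256=<urlsafe-base64 digest without padding>,size-in-bytes`; the compiled `__pycache__` entries legitimately leave the hash and size blank. A short sketch for recomputing one digest; the site-packages prefix below is an assumption about this venv's layout:

import base64
import hashlib

def record_digest(path):
    # RECORD stores sha256 digests as URL-safe base64 with the '=' padding stripped
    digest = hashlib.sha256(open(path, 'rb').read()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')

# compare against the aiohttp/py.typed row above (assumed on-disk location)
print(record_digest('venv-linux/lib/python3.6/site-packages/aiohttp/py.typed'))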

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: false
Tag: cp36-cp36m-manylinux1_x86_64

View File

@@ -0,0 +1,226 @@
__version__ = '3.6.3'
from typing import Tuple # noqa
from . import hdrs as hdrs
from .client import BaseConnector as BaseConnector
from .client import ClientConnectionError as ClientConnectionError
from .client import (
ClientConnectorCertificateError as ClientConnectorCertificateError,
)
from .client import ClientConnectorError as ClientConnectorError
from .client import ClientConnectorSSLError as ClientConnectorSSLError
from .client import ClientError as ClientError
from .client import ClientHttpProxyError as ClientHttpProxyError
from .client import ClientOSError as ClientOSError
from .client import ClientPayloadError as ClientPayloadError
from .client import ClientProxyConnectionError as ClientProxyConnectionError
from .client import ClientRequest as ClientRequest
from .client import ClientResponse as ClientResponse
from .client import ClientResponseError as ClientResponseError
from .client import ClientSession as ClientSession
from .client import ClientSSLError as ClientSSLError
from .client import ClientTimeout as ClientTimeout
from .client import ClientWebSocketResponse as ClientWebSocketResponse
from .client import ContentTypeError as ContentTypeError
from .client import Fingerprint as Fingerprint
from .client import InvalidURL as InvalidURL
from .client import NamedPipeConnector as NamedPipeConnector
from .client import RequestInfo as RequestInfo
from .client import ServerConnectionError as ServerConnectionError
from .client import ServerDisconnectedError as ServerDisconnectedError
from .client import ServerFingerprintMismatch as ServerFingerprintMismatch
from .client import ServerTimeoutError as ServerTimeoutError
from .client import TCPConnector as TCPConnector
from .client import TooManyRedirects as TooManyRedirects
from .client import UnixConnector as UnixConnector
from .client import WSServerHandshakeError as WSServerHandshakeError
from .client import request as request
from .cookiejar import CookieJar as CookieJar
from .cookiejar import DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth
from .helpers import ChainMapProxy as ChainMapProxy
from .http import HttpVersion as HttpVersion
from .http import HttpVersion10 as HttpVersion10
from .http import HttpVersion11 as HttpVersion11
from .http import WebSocketError as WebSocketError
from .http import WSCloseCode as WSCloseCode
from .http import WSMessage as WSMessage
from .http import WSMsgType as WSMsgType
from .multipart import (
BadContentDispositionHeader as BadContentDispositionHeader,
)
from .multipart import BadContentDispositionParam as BadContentDispositionParam
from .multipart import BodyPartReader as BodyPartReader
from .multipart import MultipartReader as MultipartReader
from .multipart import MultipartWriter as MultipartWriter
from .multipart import (
content_disposition_filename as content_disposition_filename,
)
from .multipart import parse_content_disposition as parse_content_disposition
from .payload import PAYLOAD_REGISTRY as PAYLOAD_REGISTRY
from .payload import AsyncIterablePayload as AsyncIterablePayload
from .payload import BufferedReaderPayload as BufferedReaderPayload
from .payload import BytesIOPayload as BytesIOPayload
from .payload import BytesPayload as BytesPayload
from .payload import IOBasePayload as IOBasePayload
from .payload import JsonPayload as JsonPayload
from .payload import Payload as Payload
from .payload import StringIOPayload as StringIOPayload
from .payload import StringPayload as StringPayload
from .payload import TextIOPayload as TextIOPayload
from .payload import get_payload as get_payload
from .payload import payload_type as payload_type
from .payload_streamer import streamer as streamer
from .resolver import AsyncResolver as AsyncResolver
from .resolver import DefaultResolver as DefaultResolver
from .resolver import ThreadedResolver as ThreadedResolver
from .signals import Signal as Signal
from .streams import EMPTY_PAYLOAD as EMPTY_PAYLOAD
from .streams import DataQueue as DataQueue
from .streams import EofStream as EofStream
from .streams import FlowControlDataQueue as FlowControlDataQueue
from .streams import StreamReader as StreamReader
from .tracing import TraceConfig as TraceConfig
from .tracing import (
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
)
from .tracing import (
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
)
from .tracing import (
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
)
from .tracing import (
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
)
from .tracing import (
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
)
from .tracing import TraceDnsCacheHitParams as TraceDnsCacheHitParams
from .tracing import TraceDnsCacheMissParams as TraceDnsCacheMissParams
from .tracing import (
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
)
from .tracing import (
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
)
from .tracing import TraceRequestChunkSentParams as TraceRequestChunkSentParams
from .tracing import TraceRequestEndParams as TraceRequestEndParams
from .tracing import TraceRequestExceptionParams as TraceRequestExceptionParams
from .tracing import TraceRequestRedirectParams as TraceRequestRedirectParams
from .tracing import TraceRequestStartParams as TraceRequestStartParams
from .tracing import (
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
__all__ = (
'hdrs',
# client
'BaseConnector',
'ClientConnectionError',
'ClientConnectorCertificateError',
'ClientConnectorError',
'ClientConnectorSSLError',
'ClientError',
'ClientHttpProxyError',
'ClientOSError',
'ClientPayloadError',
'ClientProxyConnectionError',
'ClientResponse',
'ClientRequest',
'ClientResponseError',
'ClientSSLError',
'ClientSession',
'ClientTimeout',
'ClientWebSocketResponse',
'ContentTypeError',
'Fingerprint',
'InvalidURL',
'RequestInfo',
'ServerConnectionError',
'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ServerTimeoutError',
'TCPConnector',
'TooManyRedirects',
'UnixConnector',
'NamedPipeConnector',
'WSServerHandshakeError',
'request',
# cookiejar
'CookieJar',
'DummyCookieJar',
# formdata
'FormData',
# helpers
'BasicAuth',
'ChainMapProxy',
# http
'HttpVersion',
'HttpVersion10',
'HttpVersion11',
'WSMsgType',
'WSCloseCode',
'WSMessage',
'WebSocketError',
# multipart
'BadContentDispositionHeader',
'BadContentDispositionParam',
'BodyPartReader',
'MultipartReader',
'MultipartWriter',
'content_disposition_filename',
'parse_content_disposition',
# payload
'AsyncIterablePayload',
'BufferedReaderPayload',
'BytesIOPayload',
'BytesPayload',
'IOBasePayload',
'JsonPayload',
'PAYLOAD_REGISTRY',
'Payload',
'StringIOPayload',
'StringPayload',
'TextIOPayload',
'get_payload',
'payload_type',
# payload_streamer
'streamer',
# resolver
'AsyncResolver',
'DefaultResolver',
'ThreadedResolver',
# signals
'Signal',
'DataQueue',
'EMPTY_PAYLOAD',
'EofStream',
'FlowControlDataQueue',
'StreamReader',
# tracing
'TraceConfig',
'TraceConnectionCreateEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionReuseconnParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceDnsResolveHostEndParams',
'TraceDnsResolveHostStartParams',
'TraceRequestChunkSentParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceRequestRedirectParams',
'TraceRequestStartParams',
'TraceResponseChunkReceivedParams',
) # type: Tuple[str, ...]
try:
from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa
__all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
except ImportError: # pragma: no cover
pass

View File

@@ -0,0 +1,140 @@
from libc.stdint cimport uint16_t, uint32_t, uint64_t
cdef extern from "../vendor/http-parser/http_parser.h":
ctypedef int (*http_data_cb) (http_parser*,
const char *at,
size_t length) except -1
ctypedef int (*http_cb) (http_parser*) except -1
struct http_parser:
unsigned int type
unsigned int flags
unsigned int state
unsigned int header_state
unsigned int index
uint32_t nread
uint64_t content_length
unsigned short http_major
unsigned short http_minor
unsigned int status_code
unsigned int method
unsigned int http_errno
unsigned int upgrade
void *data
struct http_parser_settings:
http_cb on_message_begin
http_data_cb on_url
http_data_cb on_status
http_data_cb on_header_field
http_data_cb on_header_value
http_cb on_headers_complete
http_data_cb on_body
http_cb on_message_complete
http_cb on_chunk_header
http_cb on_chunk_complete
enum http_parser_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
enum http_errno:
HPE_OK,
HPE_CB_message_begin,
HPE_CB_url,
HPE_CB_header_field,
HPE_CB_header_value,
HPE_CB_headers_complete,
HPE_CB_body,
HPE_CB_message_complete,
HPE_CB_status,
HPE_CB_chunk_header,
HPE_CB_chunk_complete,
HPE_INVALID_EOF_STATE,
HPE_HEADER_OVERFLOW,
HPE_CLOSED_CONNECTION,
HPE_INVALID_VERSION,
HPE_INVALID_STATUS,
HPE_INVALID_METHOD,
HPE_INVALID_URL,
HPE_INVALID_HOST,
HPE_INVALID_PORT,
HPE_INVALID_PATH,
HPE_INVALID_QUERY_STRING,
HPE_INVALID_FRAGMENT,
HPE_LF_EXPECTED,
HPE_INVALID_HEADER_TOKEN,
HPE_INVALID_CONTENT_LENGTH,
HPE_INVALID_CHUNK_SIZE,
HPE_INVALID_CONSTANT,
HPE_INVALID_INTERNAL_STATE,
HPE_STRICT,
HPE_PAUSED,
HPE_UNKNOWN
enum flags:
F_CHUNKED,
F_CONNECTION_KEEP_ALIVE,
F_CONNECTION_CLOSE,
F_CONNECTION_UPGRADE,
F_TRAILING,
F_UPGRADE,
F_SKIPBODY,
F_CONTENTLENGTH
enum http_method:
DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY,
LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND,
REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE,
MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR,
LINK, UNLINK
void http_parser_init(http_parser *parser, http_parser_type type)
size_t http_parser_execute(http_parser *parser,
const http_parser_settings *settings,
const char *data,
size_t len)
int http_should_keep_alive(const http_parser *parser)
void http_parser_settings_init(http_parser_settings *settings)
const char *http_errno_name(http_errno err)
const char *http_errno_description(http_errno err)
const char *http_method_str(http_method m)
# URL Parser
enum http_parser_url_fields:
UF_SCHEMA = 0,
UF_HOST = 1,
UF_PORT = 2,
UF_PATH = 3,
UF_QUERY = 4,
UF_FRAGMENT = 5,
UF_USERINFO = 6,
UF_MAX = 7
struct http_parser_url_field_data:
uint16_t off
uint16_t len
struct http_parser_url:
uint16_t field_set
uint16_t port
http_parser_url_field_data[<int>UF_MAX] field_data
void http_parser_url_init(http_parser_url *u)
int http_parser_parse_url(const char *buf,
size_t buflen,
int is_connect,
http_parser_url *u)

File diff suppressed because it is too large

View File

@@ -0,0 +1,14 @@
#ifndef _FIND_HEADERS_H
#define _FIND_HEADERS_H
#ifdef __cplusplus
extern "C" {
#endif
int find_header(const char *str, int size);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,2 @@
cdef extern from "_find_header.h":
int find_header(char *, int)

File diff suppressed because it is too large

View File

@@ -0,0 +1,108 @@
from collections.abc import MutableSequence
cdef class FrozenList:
cdef readonly bint frozen
cdef list _items
def __init__(self, items=None):
self.frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
cdef object _check_frozen(self):
if self.frozen:
raise RuntimeError("Cannot modify frozen list.")
cdef inline object _fast_len(self):
return len(self._items)
def freeze(self):
self.frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
self._check_frozen()
self._items[index] = value
def __delitem__(self, index):
self._check_frozen()
del self._items[index]
def __len__(self):
return self._fast_len()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __richcmp__(self, other, op):
if op == 0: # <
return list(self) < other
if op == 1: # <=
return list(self) <= other
if op == 2: # ==
return list(self) == other
if op == 3: # !=
return list(self) != other
if op == 4: # >
return list(self) > other
        if op == 5: # >=
return list(self) >= other
def insert(self, pos, item):
self._check_frozen()
self._items.insert(pos, item)
def __contains__(self, item):
return item in self._items
def __iadd__(self, items):
self._check_frozen()
self._items += list(items)
return self
def index(self, item):
return self._items.index(item)
def remove(self, item):
self._check_frozen()
self._items.remove(item)
def clear(self):
self._check_frozen()
self._items.clear()
def extend(self, items):
self._check_frozen()
self._items += list(items)
def reverse(self):
self._check_frozen()
self._items.reverse()
def pop(self, index=-1):
self._check_frozen()
return self._items.pop(index)
def append(self, item):
self._check_frozen()
return self._items.append(item)
def count(self, item):
return self._items.count(item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self.frozen,
self._items)
MutableSequence.register(FrozenList)
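
The class above is the Cython version of aiohttp's frozen list; `aiohttp.frozenlist.FrozenList` (listed in the RECORD above) resolves to it when the extension is importable, with a pure-Python fallback that behaves the same way. A brief usage sketch:

from aiohttp.frozenlist import FrozenList

fl = FrozenList([1, 2])
fl.append(3)          # mutation is allowed while unfrozen
fl.freeze()           # afterwards the list is read-only
try:
    fl.append(4)
except RuntimeError as exc:
    print(exc)        # "Cannot modify frozen list."
print(list(fl))       # [1, 2, 3]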

View File

@@ -0,0 +1,84 @@
# The file is autogenerated from aiohttp/hdrs.py
# Run ./tools/gen.py to update it after the origin changing.
from . import hdrs
cdef tuple headers = (
hdrs.ACCEPT,
hdrs.ACCEPT_CHARSET,
hdrs.ACCEPT_ENCODING,
hdrs.ACCEPT_LANGUAGE,
hdrs.ACCEPT_RANGES,
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
hdrs.ACCESS_CONTROL_MAX_AGE,
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
hdrs.AGE,
hdrs.ALLOW,
hdrs.AUTHORIZATION,
hdrs.CACHE_CONTROL,
hdrs.CONNECTION,
hdrs.CONTENT_DISPOSITION,
hdrs.CONTENT_ENCODING,
hdrs.CONTENT_LANGUAGE,
hdrs.CONTENT_LENGTH,
hdrs.CONTENT_LOCATION,
hdrs.CONTENT_MD5,
hdrs.CONTENT_RANGE,
hdrs.CONTENT_TRANSFER_ENCODING,
hdrs.CONTENT_TYPE,
hdrs.COOKIE,
hdrs.DATE,
hdrs.DESTINATION,
hdrs.DIGEST,
hdrs.ETAG,
hdrs.EXPECT,
hdrs.EXPIRES,
hdrs.FORWARDED,
hdrs.FROM,
hdrs.HOST,
hdrs.IF_MATCH,
hdrs.IF_MODIFIED_SINCE,
hdrs.IF_NONE_MATCH,
hdrs.IF_RANGE,
hdrs.IF_UNMODIFIED_SINCE,
hdrs.KEEP_ALIVE,
hdrs.LAST_EVENT_ID,
hdrs.LAST_MODIFIED,
hdrs.LINK,
hdrs.LOCATION,
hdrs.MAX_FORWARDS,
hdrs.ORIGIN,
hdrs.PRAGMA,
hdrs.PROXY_AUTHENTICATE,
hdrs.PROXY_AUTHORIZATION,
hdrs.RANGE,
hdrs.REFERER,
hdrs.RETRY_AFTER,
hdrs.SEC_WEBSOCKET_ACCEPT,
hdrs.SEC_WEBSOCKET_EXTENSIONS,
hdrs.SEC_WEBSOCKET_KEY,
hdrs.SEC_WEBSOCKET_KEY1,
hdrs.SEC_WEBSOCKET_PROTOCOL,
hdrs.SEC_WEBSOCKET_VERSION,
hdrs.SERVER,
hdrs.SET_COOKIE,
hdrs.TE,
hdrs.TRAILER,
hdrs.TRANSFER_ENCODING,
hdrs.UPGRADE,
hdrs.URI,
hdrs.USER_AGENT,
hdrs.VARY,
hdrs.VIA,
hdrs.WANT_DIGEST,
hdrs.WARNING,
hdrs.WEBSOCKET,
hdrs.WWW_AUTHENTICATE,
hdrs.X_FORWARDED_FOR,
hdrs.X_FORWARDED_HOST,
hdrs.X_FORWARDED_PROTO,
)

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
from typing import Any
class reify:
def __init__(self, wrapped: Any) -> None: ...
def __get__(self, inst: Any, owner: Any) -> Any: ...
def __set__(self, inst: Any, value: Any) -> None: ...

View File

@@ -0,0 +1,35 @@
cdef class reify:
"""Use as a class method decorator. It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
cdef object wrapped
cdef object name
def __init__(self, wrapped):
self.wrapped = wrapped
self.name = wrapped.__name__
@property
def __doc__(self):
return self.wrapped.__doc__
def __get__(self, inst, owner):
try:
try:
return inst._cache[self.name]
except KeyError:
val = self.wrapped(inst)
inst._cache[self.name] = val
return val
except AttributeError:
if inst is None:
return self
raise
def __set__(self, inst, value):
raise AttributeError("reified property is read-only")
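
Per the docstring above, `reify` caches the decorated method's result in the instance's `_cache` dict on first access. A minimal sketch using the public `aiohttp.helpers.reify` name (which resolves to this Cython class when the extension is available); the host object must provide `_cache`:

from aiohttp.helpers import reify

class Page:
    def __init__(self, raw):
        self._cache = {}             # reify stores computed values here
        self._raw = raw

    @reify
    def parsed(self):
        print('computing once')
        return self._raw.upper()

page = Page('hello')
print(page.parsed)    # prints "computing once", then HELLO
print(page.parsed)    # second access is served from _cache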

File diff suppressed because it is too large

View File

@@ -0,0 +1,846 @@
#cython: language_level=3
#
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function
from cpython.mem cimport PyMem_Malloc, PyMem_Free
from libc.string cimport memcpy
from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE,
Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize)
from multidict import (CIMultiDict as _CIMultiDict,
CIMultiDictProxy as _CIMultiDictProxy)
from yarl import URL as _URL
from aiohttp import hdrs
from .http_exceptions import (
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError,
PayloadEncodingError, ContentLengthError, TransferEncodingError)
from .http_writer import (HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD,
StreamReader as _StreamReader)
cimport cython
from aiohttp cimport _cparser as cparser
include "_headers.pxi"
from aiohttp cimport _find_header
DEF DEFAULT_FREELIST_SIZE = 250
cdef extern from "Python.h":
int PyByteArray_Resize(object, Py_ssize_t) except -1
Py_ssize_t PyByteArray_Size(object) except -1
char* PyByteArray_AsString(object)
__all__ = ('HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer
cdef inline object extend(object buf, const char* at, size_t length):
cdef Py_ssize_t s
cdef char* ptr
s = PyByteArray_Size(buf)
PyByteArray_Resize(buf, s + length)
ptr = PyByteArray_AsString(buf)
memcpy(ptr + s, at, length)
DEF METHODS_COUNT = 34;
cdef list _http_method = []
for i in range(METHODS_COUNT):
_http_method.append(
cparser.http_method_str(<cparser.http_method> i).decode('ascii'))
cdef inline str http_method_str(int i):
if i < METHODS_COUNT:
return <str>_http_method[i]
else:
return "<unknown>"
cdef inline object find_header(bytes raw_header):
cdef Py_ssize_t size
cdef char *buf
cdef int idx
PyBytes_AsStringAndSize(raw_header, &buf, &size)
idx = _find_header.find_header(buf, size)
if idx == -1:
return raw_header.decode('utf-8', 'surrogateescape')
return headers[idx]
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
cdef readonly str method
cdef readonly str path
cdef readonly object version # HttpVersion
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
cdef readonly object url # yarl.URL
def __init__(self, method, path, version, headers, raw_headers,
should_close, compression, upgrade, chunked, url):
self.method = method
self.path = path
self.version = version
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
self.url = url
def __repr__(self):
info = []
info.append(("method", self.method))
info.append(("path", self.path))
info.append(("version", self.version))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
info.append(("url", self.url))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawRequestMessage(' + sinfo + ')>'
def _replace(self, **dct):
cdef RawRequestMessage ret
ret = _new_request_message(self.method,
self.path,
self.version,
self.headers,
self.raw_headers,
self.should_close,
self.compression,
self.upgrade,
self.chunked,
self.url)
if "method" in dct:
ret.method = dct["method"]
if "path" in dct:
ret.path = dct["path"]
if "version" in dct:
ret.version = dct["version"]
if "headers" in dct:
ret.headers = dct["headers"]
if "raw_headers" in dct:
ret.raw_headers = dct["raw_headers"]
if "should_close" in dct:
ret.should_close = dct["should_close"]
if "compression" in dct:
ret.compression = dct["compression"]
if "upgrade" in dct:
ret.upgrade = dct["upgrade"]
if "chunked" in dct:
ret.chunked = dct["chunked"]
if "url" in dct:
ret.url = dct["url"]
return ret
cdef _new_request_message(str method,
str path,
object version,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked,
object url):
cdef RawRequestMessage ret
ret = RawRequestMessage.__new__(RawRequestMessage)
ret.method = method
ret.path = path
ret.version = version
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
ret.url = url
return ret
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
cdef readonly object version # HttpVersion
cdef readonly int code
cdef readonly str reason
cdef readonly object headers # CIMultiDict
cdef readonly object raw_headers # tuple
cdef readonly object should_close
cdef readonly object compression
cdef readonly object upgrade
cdef readonly object chunked
def __init__(self, version, code, reason, headers, raw_headers,
should_close, compression, upgrade, chunked):
self.version = version
self.code = code
self.reason = reason
self.headers = headers
self.raw_headers = raw_headers
self.should_close = should_close
self.compression = compression
self.upgrade = upgrade
self.chunked = chunked
def __repr__(self):
info = []
info.append(("version", self.version))
info.append(("code", self.code))
info.append(("reason", self.reason))
info.append(("headers", self.headers))
info.append(("raw_headers", self.raw_headers))
info.append(("should_close", self.should_close))
info.append(("compression", self.compression))
info.append(("upgrade", self.upgrade))
info.append(("chunked", self.chunked))
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
return '<RawResponseMessage(' + sinfo + ')>'
cdef _new_response_message(object version,
int code,
str reason,
object headers,
object raw_headers,
bint should_close,
object compression,
bint upgrade,
bint chunked):
cdef RawResponseMessage ret
ret = RawResponseMessage.__new__(RawResponseMessage)
ret.version = version
ret.code = code
ret.reason = reason
ret.headers = headers
ret.raw_headers = raw_headers
ret.should_close = should_close
ret.compression = compression
ret.upgrade = upgrade
ret.chunked = chunked
return ret
@cython.internal
cdef class HttpParser:
cdef:
cparser.http_parser* _cparser
cparser.http_parser_settings* _csettings
bytearray _raw_name
bytearray _raw_value
bint _has_value
object _protocol
object _loop
object _timer
size_t _max_line_size
size_t _max_field_size
size_t _max_headers
bint _response_with_body
bint _started
object _url
bytearray _buf
str _path
str _reason
object _headers
list _raw_headers
bint _upgraded
list _messages
object _payload
bint _payload_error
object _payload_exception
object _last_error
bint _auto_decompress
str _content_encoding
Py_buffer py_buf
def __cinit__(self):
self._cparser = <cparser.http_parser*> \
PyMem_Malloc(sizeof(cparser.http_parser))
if self._cparser is NULL:
raise MemoryError()
self._csettings = <cparser.http_parser_settings*> \
PyMem_Malloc(sizeof(cparser.http_parser_settings))
if self._csettings is NULL:
raise MemoryError()
def __dealloc__(self):
PyMem_Free(self._cparser)
PyMem_Free(self._csettings)
cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
self._cparser.data = <void*>self
self._cparser.content_length = 0
cparser.http_parser_settings_init(self._csettings)
self._protocol = protocol
self._loop = loop
self._timer = timer
self._buf = bytearray()
self._payload = None
self._payload_error = 0
self._payload_exception = payload_exception
self._messages = []
self._raw_name = bytearray()
self._raw_value = bytearray()
self._has_value = False
self._max_line_size = max_line_size
self._max_headers = max_headers
self._max_field_size = max_field_size
self._response_with_body = response_with_body
self._upgraded = False
self._auto_decompress = auto_decompress
self._content_encoding = None
self._csettings.on_url = cb_on_url
self._csettings.on_status = cb_on_status
self._csettings.on_header_field = cb_on_header_field
self._csettings.on_header_value = cb_on_header_value
self._csettings.on_headers_complete = cb_on_headers_complete
self._csettings.on_body = cb_on_body
self._csettings.on_message_begin = cb_on_message_begin
self._csettings.on_message_complete = cb_on_message_complete
self._csettings.on_chunk_header = cb_on_chunk_header
self._csettings.on_chunk_complete = cb_on_chunk_complete
self._last_error = None
cdef _process_header(self):
if self._raw_name:
raw_name = bytes(self._raw_name)
raw_value = bytes(self._raw_value)
name = find_header(raw_name)
value = raw_value.decode('utf-8', 'surrogateescape')
self._headers.add(name, value)
if name is CONTENT_ENCODING:
self._content_encoding = value
PyByteArray_Resize(self._raw_name, 0)
PyByteArray_Resize(self._raw_value, 0)
self._has_value = False
self._raw_headers.append((raw_name, raw_value))
cdef _on_header_field(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
if self._has_value:
self._process_header()
size = PyByteArray_Size(self._raw_name)
PyByteArray_Resize(self._raw_name, size + length)
buf = PyByteArray_AsString(self._raw_name)
memcpy(buf + size, at, length)
cdef _on_header_value(self, char* at, size_t length):
cdef Py_ssize_t size
cdef char *buf
size = PyByteArray_Size(self._raw_value)
PyByteArray_Resize(self._raw_value, size + length)
buf = PyByteArray_AsString(self._raw_value)
memcpy(buf + size, at, length)
self._has_value = True
cdef _on_headers_complete(self):
self._process_header()
method = http_method_str(self._cparser.method)
should_close = not cparser.http_should_keep_alive(self._cparser)
upgrade = self._cparser.upgrade
chunked = self._cparser.flags & cparser.F_CHUNKED
raw_headers = tuple(self._raw_headers)
headers = CIMultiDictProxy(self._headers)
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
self._upgraded = True
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
encoding = None
enc = self._content_encoding
if enc is not None:
self._content_encoding = None
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
encoding = enc
if self._cparser.type == cparser.HTTP_REQUEST:
msg = _new_request_message(
method, self._path,
self.http_version(), headers, raw_headers,
should_close, encoding, upgrade, chunked, self._url)
else:
msg = _new_response_message(
self.http_version(), self._cparser.status_code, self._reason,
headers, raw_headers, should_close, encoding,
upgrade, chunked)
if (self._cparser.content_length > 0 or chunked or
self._cparser.method == 5): # CONNECT: 5
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop)
else:
payload = EMPTY_PAYLOAD
self._payload = payload
if encoding is not None and self._auto_decompress:
self._payload = DeflateBuffer(payload, encoding)
if not self._response_with_body:
payload = EMPTY_PAYLOAD
self._messages.append((msg, payload))
cdef _on_message_complete(self):
self._payload.feed_eof()
self._payload = None
cdef _on_chunk_header(self):
self._payload.begin_http_chunk_receiving()
cdef _on_chunk_complete(self):
self._payload.end_http_chunk_receiving()
cdef object _on_status_complete(self):
pass
cdef inline http_version(self):
cdef cparser.http_parser* parser = self._cparser
if parser.http_major == 1:
if parser.http_minor == 0:
return HttpVersion10
elif parser.http_minor == 1:
return HttpVersion11
return HttpVersion(parser.http_major, parser.http_minor)
### Public API ###
def feed_eof(self):
cdef bytes desc
if self._payload is not None:
if self._cparser.flags & cparser.F_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
elif self._cparser.flags & cparser.F_CONTENTLENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
elif self._cparser.http_errno != cparser.HPE_OK:
desc = cparser.http_errno_description(
<cparser.http_errno> self._cparser.http_errno)
raise PayloadEncodingError(desc.decode('latin-1'))
else:
self._payload.feed_eof()
elif self._started:
self._on_headers_complete()
if self._messages:
return self._messages[-1][0]
def feed_data(self, data):
cdef:
size_t data_len
size_t nb
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
data_len = <size_t>self.py_buf.len
nb = cparser.http_parser_execute(
self._cparser,
self._csettings,
<char*>self.py_buf.buf,
data_len)
PyBuffer_Release(&self.py_buf)
        # Unclear behaviour around cparser.HPE_INVALID_METHOD: the parser
        # seems to report it even for some valid requests, e.g.
        # test_client_functional.py::test_post_data_with_bytesio_file
if (self._cparser.http_errno != cparser.HPE_OK and
(self._cparser.http_errno != cparser.HPE_INVALID_METHOD or
self._cparser.method == 0)):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
self._last_error = None
else:
ex = parser_error_from_errno(
<cparser.http_errno> self._cparser.http_errno)
self._payload = None
raise ex
if self._messages:
messages = self._messages
self._messages = []
else:
messages = ()
if self._upgraded:
return messages, True, data[nb:]
else:
return messages, False, b''
cdef class HttpRequestParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False):
self._init(cparser.HTTP_REQUEST, protocol, loop, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body)
cdef object _on_status_complete(self):
cdef Py_buffer py_buf
if not self._buf:
return
self._path = self._buf.decode('utf-8', 'surrogateescape')
if self._cparser.method == 5: # CONNECT
self._url = URL(self._path)
else:
PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE)
try:
self._url = _parse_url(<char*>py_buf.buf,
py_buf.len)
finally:
PyBuffer_Release(&py_buf)
PyByteArray_Resize(self._buf, 0)
cdef class HttpResponseParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
self._init(cparser.HTTP_RESPONSE, protocol, loop, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, auto_decompress)
cdef object _on_status_complete(self):
if self._buf:
self._reason = self._buf.decode('utf-8', 'surrogateescape')
PyByteArray_Resize(self._buf, 0)
else:
self._reason = self._reason or ''
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
pyparser._started = True
pyparser._headers = CIMultiDict()
pyparser._raw_headers = []
PyByteArray_Resize(pyparser._buf, 0)
pyparser._path = None
pyparser._reason = None
return 0
cdef int cb_on_url(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_status(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef str reason
try:
if length > pyparser._max_line_size:
raise LineTooLong(
'Status line is too long', pyparser._max_line_size, length)
extend(pyparser._buf, at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_field(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
pyparser._on_status_complete()
size = len(pyparser._raw_name) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header name is too long', pyparser._max_field_size, size)
pyparser._on_header_field(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_header_value(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef Py_ssize_t size
try:
size = len(pyparser._raw_value) + length
if size > pyparser._max_field_size:
raise LineTooLong(
'Header value is too long', pyparser._max_field_size, size)
pyparser._on_header_value(at, length)
except BaseException as ex:
pyparser._last_error = ex
return -1
else:
return 0
cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_status_complete()
pyparser._on_headers_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
return 2
else:
return 0
cdef int cb_on_body(cparser.http_parser* parser,
const char *at, size_t length) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
cdef bytes body = at[:length]
try:
pyparser._payload.feed_data(body, length)
except BaseException as exc:
if pyparser._payload_exception is not None:
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
else:
pyparser._payload.set_exception(exc)
pyparser._payload_error = 1
return -1
else:
return 0
cdef int cb_on_message_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._started = False
pyparser._on_message_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_header()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1:
cdef HttpParser pyparser = <HttpParser>parser.data
try:
pyparser._on_chunk_complete()
except BaseException as exc:
pyparser._last_error = exc
return -1
else:
return 0
cdef parser_error_from_errno(cparser.http_errno errno):
cdef bytes desc = cparser.http_errno_description(errno)
if errno in (cparser.HPE_CB_message_begin,
cparser.HPE_CB_url,
cparser.HPE_CB_header_field,
cparser.HPE_CB_header_value,
cparser.HPE_CB_headers_complete,
cparser.HPE_CB_body,
cparser.HPE_CB_message_complete,
cparser.HPE_CB_status,
cparser.HPE_CB_chunk_header,
cparser.HPE_CB_chunk_complete):
cls = BadHttpMessage
elif errno == cparser.HPE_INVALID_STATUS:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_METHOD:
cls = BadStatusLine
elif errno == cparser.HPE_INVALID_URL:
cls = InvalidURLError
else:
cls = BadHttpMessage
return cls(desc.decode('latin-1'))
def parse_url(url):
cdef:
Py_buffer py_buf
char* buf_data
PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
try:
buf_data = <char*>py_buf.buf
return _parse_url(buf_data, py_buf.len)
finally:
PyBuffer_Release(&py_buf)
cdef _parse_url(char* buf_data, size_t length):
cdef:
cparser.http_parser_url* parsed
int res
str schema = None
str host = None
object port = None
str path = None
str query = None
str fragment = None
str user = None
str password = None
str userinfo = None
object result = None
int off
int ln
parsed = <cparser.http_parser_url*> \
PyMem_Malloc(sizeof(cparser.http_parser_url))
if parsed is NULL:
raise MemoryError()
cparser.http_parser_url_init(parsed)
try:
res = cparser.http_parser_parse_url(buf_data, length, 0, parsed)
if res == 0:
if parsed.field_set & (1 << cparser.UF_SCHEMA):
off = parsed.field_data[<int>cparser.UF_SCHEMA].off
ln = parsed.field_data[<int>cparser.UF_SCHEMA].len
schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
schema = ''
if parsed.field_set & (1 << cparser.UF_HOST):
off = parsed.field_data[<int>cparser.UF_HOST].off
ln = parsed.field_data[<int>cparser.UF_HOST].len
host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
host = ''
if parsed.field_set & (1 << cparser.UF_PORT):
port = parsed.port
if parsed.field_set & (1 << cparser.UF_PATH):
off = parsed.field_data[<int>cparser.UF_PATH].off
ln = parsed.field_data[<int>cparser.UF_PATH].len
path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
path = ''
if parsed.field_set & (1 << cparser.UF_QUERY):
off = parsed.field_data[<int>cparser.UF_QUERY].off
ln = parsed.field_data[<int>cparser.UF_QUERY].len
query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
query = ''
if parsed.field_set & (1 << cparser.UF_FRAGMENT):
off = parsed.field_data[<int>cparser.UF_FRAGMENT].off
ln = parsed.field_data[<int>cparser.UF_FRAGMENT].len
fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
else:
fragment = ''
if parsed.field_set & (1 << cparser.UF_USERINFO):
off = parsed.field_data[<int>cparser.UF_USERINFO].off
ln = parsed.field_data[<int>cparser.UF_USERINFO].len
userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape')
user, sep, password = userinfo.partition(':')
return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query=query, fragment=fragment)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
PyMem_Free(parsed)
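
`parse_url` above wraps http-parser's URL splitter and returns a `yarl.URL` assembled from the parsed field offsets. A quick sketch of calling it, assuming the bundled `aiohttp._http_parser` extension imports on this interpreter:

from aiohttp._http_parser import parse_url

url = parse_url(b'http://user:pw@example.com:8080/path?q=1#frag')
print(url.scheme, url.host, url.port)              # http example.com 8080
print(url.path, url.query_string, url.fragment)    # /path q=1 frag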

File diff suppressed because it is too large

View File

@@ -0,0 +1,152 @@
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.object cimport PyObject_Str
from multidict import istr
DEF BUF_SIZE = 16 * 1024 # 16KiB
cdef char BUFFER[BUF_SIZE]
cdef object _istr = istr
# ----------------- writer ---------------------------
cdef struct Writer:
char *buf
Py_ssize_t size
Py_ssize_t pos
cdef inline void _init_writer(Writer* writer):
writer.buf = &BUFFER[0]
writer.size = BUF_SIZE
writer.pos = 0
cdef inline void _release_writer(Writer* writer):
if writer.buf != BUFFER:
PyMem_Free(writer.buf)
cdef inline int _write_byte(Writer* writer, uint8_t ch):
cdef char * buf
cdef Py_ssize_t size
if writer.pos == writer.size:
# reallocate
size = writer.size + BUF_SIZE
if writer.buf == BUFFER:
buf = <char*>PyMem_Malloc(size)
if buf == NULL:
PyErr_NoMemory()
return -1
memcpy(buf, writer.buf, writer.size)
else:
buf = <char*>PyMem_Realloc(writer.buf, size)
if buf == NULL:
PyErr_NoMemory()
return -1
writer.buf = buf
writer.size = size
writer.buf[writer.pos] = <char>ch
writer.pos += 1
return 0
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
cdef uint64_t utf = <uint64_t> symbol
if utf < 0x80:
return _write_byte(writer, <uint8_t>utf)
elif utf < 0x800:
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
elif 0xD800 <= utf <= 0xDFFF:
        # surrogate pair, ignored
return 0
elif utf < 0x10000:
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
return -1
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
elif utf > 0x10FFFF:
# symbol is too large
return 0
else:
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
return -1
if _write_byte(writer,
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
return -1
if _write_byte(writer,
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
return -1
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
cdef inline int _write_str(Writer* writer, str s):
cdef Py_UCS4 ch
for ch in s:
if _write_utf8(writer, ch) < 0:
return -1
# --------------- _serialize_headers ----------------------
cdef str to_str(object s):
typ = type(s)
if typ is str:
return <str>s
elif typ is _istr:
return PyObject_Str(s)
elif not isinstance(s, str):
raise TypeError("Cannot serialize non-str key {!r}".format(s))
else:
return str(s)
def _serialize_headers(str status_line, headers):
cdef Writer writer
cdef object key
cdef object val
cdef bytes ret
_init_writer(&writer)
try:
if _write_str(&writer, status_line) < 0:
raise
if _write_byte(&writer, b'\r') < 0:
raise
if _write_byte(&writer, b'\n') < 0:
raise
for key, val in headers.items():
if _write_str(&writer, to_str(key)) < 0:
raise
if _write_byte(&writer, b':') < 0:
raise
if _write_byte(&writer, b' ') < 0:
raise
if _write_str(&writer, to_str(val)) < 0:
raise
if _write_byte(&writer, b'\r') < 0:
raise
if _write_byte(&writer, b'\n') < 0:
raise
if _write_byte(&writer, b'\r') < 0:
raise
if _write_byte(&writer, b'\n') < 0:
raise
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
finally:
_release_writer(&writer)
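
`_serialize_headers` above renders a status line plus headers into the raw CRLF-delimited block that precedes an HTTP body. A pure-Python approximation (the C version additionally drops lone surrogates and out-of-range code points while encoding); the function name here is illustrative:

def serialize_headers(status_line, headers):
    # "<status line>\r\n" then "<Name>: <value>\r\n" per header, ending with a blank line
    parts = [status_line, '\r\n']
    for name, value in headers.items():
        parts.append('{}: {}\r\n'.format(name, value))
    parts.append('\r\n')
    return ''.join(parts).encode('utf-8')

print(serialize_headers('GET / HTTP/1.1',
                        {'Host': 'example.com', 'Connection': 'keep-alive'}))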

File diff suppressed because it is too large

View File

@@ -0,0 +1,54 @@
from cpython cimport PyBytes_AsString
#from cpython cimport PyByteArray_AsString # cython still not exports that
cdef extern from "Python.h":
char* PyByteArray_AsString(bytearray ba) except NULL
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
def _websocket_mask_cython(object mask, object data):
"""Note, this function mutates its `data` argument
"""
cdef:
Py_ssize_t data_len, i
# bit operations on signed integers are implementation-specific
unsigned char * in_buf
const unsigned char * mask_buf
uint32_t uint32_msk
uint64_t uint64_msk
assert len(mask) == 4
if not isinstance(mask, bytes):
mask = bytes(mask)
if isinstance(data, bytearray):
data = <bytearray>data
else:
data = bytearray(data)
data_len = len(data)
in_buf = <unsigned char*>PyByteArray_AsString(data)
mask_buf = <const unsigned char*>PyBytes_AsString(mask)
uint32_msk = (<uint32_t*>mask_buf)[0]
# TODO: align in_data ptr to achieve even faster speeds
    # is that needed in Python? malloc() always aligns to sizeof(long) bytes
if sizeof(size_t) >= 8:
uint64_msk = uint32_msk
uint64_msk = (uint64_msk << 32) | uint32_msk
while data_len >= 8:
(<uint64_t*>in_buf)[0] ^= uint64_msk
in_buf += 8
data_len -= 8
while data_len >= 4:
(<uint32_t*>in_buf)[0] ^= uint32_msk
in_buf += 4
data_len -= 4
for i in range(0, data_len):
in_buf[i] ^= mask_buf[i]
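
The function above XORs the payload in place with the repeating 4-byte frame mask, in 8-, 4-, and finally 1-byte strides. A short pure-Python sketch of the same operation (names are illustrative); applying the mask twice restores the original bytes:

def websocket_mask(mask, data):
    # XOR every payload byte with the repeating 4-byte mask, mutating `data` in place
    for i in range(len(data)):
        data[i] ^= mask[i % 4]

payload = bytearray(b'hello websocket frame')
key = b'\x01\x02\x03\x04'
websocket_mask(key, payload)   # masked in place
websocket_mask(key, payload)   # unmasked again
print(payload)                 # bytearray(b'hello websocket frame')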

View File

@@ -0,0 +1,208 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel # noqa
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
Generator,
Iterable,
List,
Optional,
Tuple,
)
from multidict import CIMultiDict # noqa
from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
from .web_app import Application
from .web_exceptions import HTTPException
else:
BaseRequest = Request = Application = StreamResponse = None
HTTPException = None
class AbstractRouter(ABC):
def __init__(self) -> None:
self._frozen = False
def post_init(self, app: Application) -> None:
"""Post init stage.
Not an abstract method for sake of backward compatibility,
but if the router wants to be aware of the application
it can override this.
"""
@property
def frozen(self) -> bool:
return self._frozen
def freeze(self) -> None:
"""Freeze router."""
self._frozen = True
@abstractmethod
async def resolve(self, request: Request) -> 'AbstractMatchInfo':
"""Return MATCH_INFO for given request"""
class AbstractMatchInfo(ABC):
@property # pragma: no branch
@abstractmethod
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
"""Execute matched request handler"""
@property
@abstractmethod
def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
"""Expect handler for 100-continue processing"""
@property # pragma: no branch
@abstractmethod
def http_exception(self) -> Optional[HTTPException]:
"""HTTPException instance raised on router's resolving, or None"""
@abstractmethod # pragma: no branch
def get_info(self) -> Dict[str, Any]:
"""Return a dict with additional info useful for introspection"""
@property # pragma: no branch
@abstractmethod
def apps(self) -> Tuple[Application, ...]:
"""Stack of nested applications.
Top level application is left-most element.
"""
@abstractmethod
def add_app(self, app: Application) -> None:
"""Add application to the nested apps stack."""
@abstractmethod
def freeze(self) -> None:
"""Freeze the match info.
The method is called after route resolution.
After the call .add_app() is forbidden.
"""
class AbstractView(ABC):
"""Abstract class based view."""
def __init__(self, request: Request) -> None:
self._request = request
@property
def request(self) -> Request:
"""Request instance."""
return self._request
@abstractmethod
def __await__(self) -> Generator[Any, None, StreamResponse]:
"""Execute the view handler."""
class AbstractResolver(ABC):
"""Abstract DNS resolver."""
@abstractmethod
async def resolve(self, host: str,
port: int, family: int) -> List[Dict[str, Any]]:
"""Return IP address for given hostname"""
@abstractmethod
async def close(self) -> None:
"""Release resolver"""
if TYPE_CHECKING: # pragma: no cover
IterableBase = Iterable[Morsel[str]]
else:
IterableBase = Iterable
class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
self._loop = get_running_loop(loop)
@abstractmethod
def clear(self) -> None:
"""Clear all cookies."""
@abstractmethod
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
"""Update cookies."""
@abstractmethod
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
"""Return the jar's cookies filtered by their attributes."""
class AbstractStreamWriter(ABC):
"""Abstract stream writer."""
buffer_size = 0
output_size = 0
length = 0 # type: Optional[int]
@abstractmethod
async def write(self, chunk: bytes) -> None:
"""Write chunk into stream."""
@abstractmethod
async def write_eof(self, chunk: bytes=b'') -> None:
"""Write last chunk."""
@abstractmethod
async def drain(self) -> None:
"""Flush the write buffer."""
@abstractmethod
def enable_compression(self, encoding: str='deflate') -> None:
"""Enable HTTP body compression"""
@abstractmethod
def enable_chunking(self) -> None:
"""Enable HTTP chunked mode"""
@abstractmethod
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
"""Write HTTP headers"""
class AbstractAccessLogger(ABC):
"""Abstract writer to access log."""
def __init__(self, logger: logging.Logger, log_format: str) -> None:
self.logger = logger
self.log_format = log_format
@abstractmethod
def log(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
"""Emit log to logger."""

View File

@@ -0,0 +1,81 @@
import asyncio
from typing import Optional, cast
from .tcp_helpers import tcp_nodelay
class BaseProtocol(asyncio.Protocol):
__slots__ = ('_loop', '_paused', '_drain_waiter',
'_connection_lost', '_reading_paused', 'transport')
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop # type: asyncio.AbstractEventLoop
self._paused = False
self._drain_waiter = None # type: Optional[asyncio.Future[None]]
self._connection_lost = False
self._reading_paused = False
self.transport = None # type: Optional[asyncio.Transport]
def pause_writing(self) -> None:
assert not self._paused
self._paused = True
def resume_writing(self) -> None:
assert self._paused
self._paused = False
waiter = self._drain_waiter
if waiter is not None:
self._drain_waiter = None
if not waiter.done():
waiter.set_result(None)
def pause_reading(self) -> None:
if not self._reading_paused and self.transport is not None:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self) -> None:
if self._reading_paused and self.transport is not None:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def connection_made(self, transport: asyncio.BaseTransport) -> None:
tr = cast(asyncio.Transport, transport)
tcp_nodelay(tr, True)
self.transport = tr
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
return
waiter = self._drain_waiter
if waiter is None:
return
self._drain_waiter = None
if waiter.done():
return
if exc is None:
waiter.set_result(None)
else:
waiter.set_exception(exc)
async def _drain_helper(self) -> None:
if self._connection_lost:
raise ConnectionResetError('Connection lost')
if not self._paused:
return
waiter = self._drain_waiter
assert waiter is None or waiter.cancelled()
waiter = self._loop.create_future()
self._drain_waiter = waiter
await waiter
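
`_drain_helper` above implements the standard asyncio write-backpressure handshake: `pause_writing()` marks the protocol paused, `drain()` parks the writer on a future, and `resume_writing()` (or `connection_lost()`) releases it. A self-contained sketch of that handshake outside any transport (class and names are illustrative):

import asyncio

class DrainPattern:
    # condensed copy of the waiter logic from BaseProtocol above
    def __init__(self, loop):
        self._loop = loop
        self._paused = False
        self._drain_waiter = None

    def pause_writing(self):
        self._paused = True

    def resume_writing(self):
        self._paused = False
        waiter, self._drain_waiter = self._drain_waiter, None
        if waiter is not None and not waiter.done():
            waiter.set_result(None)

    async def drain(self):
        if not self._paused:
            return
        self._drain_waiter = self._loop.create_future()
        await self._drain_waiter

loop = asyncio.get_event_loop()
proto = DrainPattern(loop)
proto.pause_writing()                    # pretend the transport buffer filled up
loop.call_later(0.1, proto.resume_writing)
loop.run_until_complete(proto.drain())   # suspends until resume_writing() fires
print('writer resumed')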

File diff suppressed because it is too large

View File

@@ -0,0 +1,292 @@
"""HTTP related errors."""
import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
from .typedefs import _CIMultiDict
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
Fingerprint)
else:
RequestInfo = ClientResponse = ConnectionKey = None
__all__ = (
'ClientError',
'ClientConnectionError',
'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
'ClientSSLError',
'ClientConnectorSSLError', 'ClientConnectorCertificateError',
'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ClientResponseError', 'ClientHttpProxyError',
'WSServerHandshakeError', 'ContentTypeError',
'ClientPayloadError', 'InvalidURL')
class ClientError(Exception):
"""Base class for client connection errors."""
class ClientResponseError(ClientError):
"""Connection error during reading response.
request_info: instance of RequestInfo
"""
def __init__(self, request_info: RequestInfo,
history: Tuple[ClientResponse, ...], *,
code: Optional[int]=None,
status: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
self.request_info = request_info
if code is not None:
if status is not None:
raise ValueError(
"Both code and status arguments are provided; "
"code is deprecated, use status instead")
warnings.warn("code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
if status is not None:
self.status = status
elif code is not None:
self.status = code
else:
self.status = 0
self.message = message
self.headers = headers
self.history = history
self.args = (request_info, history)
def __str__(self) -> str:
return ("%s, message=%r, url=%r" %
(self.status, self.message, self.request_info.real_url))
def __repr__(self) -> str:
args = "%r, %r" % (self.request_info, self.history)
if self.status != 0:
args += ", status=%r" % (self.status,)
if self.message != '':
args += ", message=%r" % (self.message,)
if self.headers is not None:
args += ", headers=%r" % (self.headers,)
return "%s(%s)" % (type(self).__name__, args)
@property
def code(self) -> int:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
return self.status
@code.setter
def code(self, value: int) -> None:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
self.status = value
class ContentTypeError(ClientResponseError):
"""ContentType found is not valid."""
class WSServerHandshakeError(ClientResponseError):
"""websocket server handshake error."""
class ClientHttpProxyError(ClientResponseError):
"""HTTP proxy error.
Raised in :class:`aiohttp.connector.TCPConnector` if
proxy responds with status other than ``200 OK``
on ``CONNECT`` request.
"""
class TooManyRedirects(ClientResponseError):
"""Client was redirected too many times."""
class ClientConnectionError(ClientError):
"""Base class for client socket errors."""
class ClientOSError(ClientConnectionError, OSError):
"""OSError error."""
class ClientConnectorError(ClientOSError):
"""Client connector error.
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
def __init__(self, connection_key: ConnectionKey,
os_error: OSError) -> None:
self._conn_key = connection_key
self._os_error = os_error
super().__init__(os_error.errno, os_error.strerror)
self.args = (connection_key, os_error)
@property
def os_error(self) -> OSError:
return self._os_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
return self._conn_key.ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
.format(self, self.ssl if self.ssl is not None else 'default',
self.strerror))
# OSError.__reduce__ does too much black magick
__reduce__ = BaseException.__reduce__
class ClientProxyConnectionError(ClientConnectorError):
"""Proxy connection error.
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
class ServerConnectionError(ClientConnectionError):
"""Server connection errors."""
class ServerDisconnectedError(ServerConnectionError):
"""Server disconnected."""
def __init__(self, message: Optional[str]=None) -> None:
self.message = message
if message is None:
self.args = ()
else:
self.args = (message,)
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
"""Server timeout error."""
class ServerFingerprintMismatch(ServerConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected: bytes, got: bytes,
host: str, port: int) -> None:
self.expected = expected
self.got = got
self.host = host
self.port = port
self.args = (expected, got, host, port)
def __repr__(self) -> str:
return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
self.__class__.__name__, self.expected, self.got,
self.host, self.port)
class ClientPayloadError(ClientError):
"""Response payload error."""
class InvalidURL(ClientError, ValueError):
"""Invalid URL.
    URL used for fetching is malformed, e.g. it does not contain a
    host part."""
# Derive from ValueError for backward compatibility
def __init__(self, url: Any) -> None:
# The type of url is not yarl.URL because the exception can be raised
# on URL(url) call
super().__init__(url)
@property
def url(self) -> Any:
return self.args[0]
def __repr__(self) -> str:
return '<{} {}>'.format(self.__class__.__name__, self.url)
class ClientSSLError(ClientConnectorError):
"""Base error for ssl.*Errors."""
if ssl is not None:
cert_errors = (ssl.CertificateError,)
cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
ssl_errors = (ssl.SSLError,)
ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
cert_errors = tuple()
cert_errors_bases = (ClientSSLError, ValueError,)
ssl_errors = tuple()
ssl_error_bases = (ClientSSLError,)
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
"""Response ssl error."""
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
"""Response certificate error."""
def __init__(self, connection_key:
ConnectionKey, certificate_error: Exception) -> None:
self._conn_key = connection_key
self._certificate_error = certificate_error
self.args = (connection_key, certificate_error)
@property
def certificate_error(self) -> Exception:
return self._certificate_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> bool:
return self._conn_key.is_ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
'[{0.certificate_error.__class__.__name__}: '
'{0.certificate_error.args}]'.format(self))
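
These exceptions surface from the high-level client roughly as follows; the URL and handling below are illustrative, and `raise_for_status()` is what converts a 4xx/5xx response into the `ClientResponseError` defined above:

import asyncio
import aiohttp

async def fetch(url):
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()              # 4xx/5xx -> ClientResponseError
                return await resp.text()
    except aiohttp.ClientResponseError as exc:       # bad status
        print('bad status:', exc.status, exc.message)
    except aiohttp.ClientConnectorError as exc:      # DNS / TCP connect failure
        print('cannot connect to', exc.host, exc.port)
    except aiohttp.ClientError as exc:               # catch-all base class above
        print('client error:', exc)

loop = asyncio.get_event_loop()
loop.run_until_complete(fetch('http://example.invalid/'))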

Some files were not shown because too many files have changed in this diff