venv update

officereso 2021-03-27 16:21:31 -05:00
parent bc36667b81
commit dbebf53707
621 changed files with 112074 additions and 59281 deletions

.idea/dataSources.xml (generated)

@@ -8,6 +8,7 @@
    <jdbc-url>jdbc:mysql://192.168.1.52:5618/</jdbc-url>
    <driver-properties>
      <property name="serverTimezone" value="UTC" />
      <property name="allowPublicKeyRetrieval" value="TRUE" />
    </driver-properties>
  </data-source>
</component>
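
The added allowPublicKeyRetrieval property is what a JDBC client needs in order to
fetch the server's RSA public key when MySQL 8's caching_sha2_password authentication
is in use without TLS. On the Python side, the vendored PyMySQL handles the same key
exchange through its optional "rsa" extra (see its README further down). A minimal
connection sketch; the host and port mirror the data source above, while the user,
password, and database are placeholders, not values from this commit:

import pymysql

# Sketch only: credentials and database name are hypothetical.
connection = pymysql.connect(
    host="192.168.1.52",
    port=5618,
    user="bot",          # placeholder
    password="secret",   # placeholder
    database="botdb",    # placeholder
)
try:
    with connection.cursor() as cursor:
        cursor.execute("SELECT VERSION()")
        print(cursor.fetchone())
finally:
    connection.close()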

@@ -1,9 +1,4 @@
import asyncio
import math
from datetime import datetime, timedelta
import re
import discord
import pymysql
from discord.ext import commands
@@ -17,7 +12,7 @@ class ParseForIssues(commands.Cog):
        if message.author.id == 533427166193385494:
            return
        content = message.content.lower()
        matches = re.findall("ttg-[0-9]+",content)
        matches = re.findall("ttg-[0-9]+", content)
        for match in matches:
            await message.channel.send("https://youtrack.themissingcrowbar.com:8942/issue/"+match)
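
For context, here is the listener reassembled as a complete cog. Only the body comes
from this diff; the method name, decorator, __init__, and setup function are
reconstructed from common discord.py conventions and should be read as assumptions:

import re

from discord.ext import commands


class ParseForIssues(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_message(self, message):
        # Presumably the bot's own user ID, skipped so it never replies to itself.
        if message.author.id == 533427166193385494:
            return
        content = message.content.lower()
        matches = re.findall("ttg-[0-9]+", content)
        for match in matches:
            await message.channel.send(
                "https://youtrack.themissingcrowbar.com:8942/issue/" + match
            )


def setup(bot):
    bot.add_cog(ParseForIssues(bot))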

APScheduler-3.6.3.dist-info/LICENSE.txt

@@ -1,19 +0,0 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
Copyright (c) Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

APScheduler-3.6.3.dist-info/METADATA

@@ -1,133 +0,0 @@
Metadata-Version: 2.1
Name: APScheduler
Version: 3.6.3
Summary: In-process task scheduler with Cron-like capabilities
Home-page: https://github.com/agronholm/apscheduler
Author: Alex Grönholm
Author-email: apscheduler@nextday.fi
License: MIT
Keywords: scheduling cron
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Requires-Dist: setuptools (>=0.7)
Requires-Dist: six (>=1.4.0)
Requires-Dist: pytz
Requires-Dist: tzlocal (>=1.2)
Requires-Dist: futures ; python_version == "2.7"
Requires-Dist: funcsigs ; python_version == "2.7"
Provides-Extra: asyncio
Requires-Dist: trollius ; (python_version == "2.7") and extra == 'asyncio'
Provides-Extra: doc
Requires-Dist: sphinx ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
Provides-Extra: gevent
Requires-Dist: gevent ; extra == 'gevent'
Provides-Extra: mongodb
Requires-Dist: pymongo (>=2.8) ; extra == 'mongodb'
Provides-Extra: redis
Requires-Dist: redis (>=3.0) ; extra == 'redis'
Provides-Extra: rethinkdb
Requires-Dist: rethinkdb (>=2.4.0) ; extra == 'rethinkdb'
Provides-Extra: sqlalchemy
Requires-Dist: sqlalchemy (>=0.8) ; extra == 'sqlalchemy'
Provides-Extra: testing
Requires-Dist: pytest ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: pytest-tornado5 ; extra == 'testing'
Requires-Dist: mock ; (python_version == "2.7") and extra == 'testing'
Requires-Dist: pytest-asyncio (<0.6) ; (python_version == "3.4") and extra == 'testing'
Requires-Dist: pytest-asyncio ; (python_version >= "3.5") and extra == 'testing'
Provides-Extra: tornado
Requires-Dist: tornado (>=4.3) ; extra == 'tornado'
Provides-Extra: twisted
Requires-Dist: twisted ; extra == 'twisted'
Provides-Extra: zookeeper
Requires-Dist: kazoo ; extra == 'zookeeper'
.. image:: https://travis-ci.com/agronholm/apscheduler.svg?branch=master
   :target: https://travis-ci.com/agronholm/apscheduler
   :alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/apscheduler/badge.svg?branch=master
   :target: https://coveralls.io/github/agronholm/apscheduler?branch=master
   :alt: Code Coverage
Advanced Python Scheduler (APScheduler) is a Python library that lets you schedule your Python code
to be executed later, either just once or periodically. You can add new jobs or remove old ones on
the fly as you please. If you store your jobs in a database, they will also survive scheduler
restarts and maintain their state. When the scheduler is restarted, it will then run all the jobs
it should have run while it was offline [#f1]_.
Among other things, APScheduler can be used as a cross-platform, application-specific
replacement for platform-specific schedulers, such as the cron daemon or the Windows task
scheduler. Please note, however, that APScheduler is **not** a daemon or service itself, nor
does it come with any command line tools. It is primarily meant to be run inside existing
applications. That said, APScheduler does provide some building blocks for you to build a
scheduler service or to run a dedicated scheduler process.
APScheduler has three built-in scheduling systems you can use:
* Cron-style scheduling (with optional start/end times)
* Interval-based execution (runs jobs on even intervals, with optional start/end times)
* One-off delayed execution (runs jobs once, on a set date/time)
You can mix and match scheduling systems and the backends where the jobs are stored any way you
like. Supported backends for storing jobs include:
* Memory
* `SQLAlchemy <http://www.sqlalchemy.org/>`_ (any RDBMS supported by SQLAlchemy works)
* `MongoDB <http://www.mongodb.org/>`_
* `Redis <http://redis.io/>`_
* `RethinkDB <https://www.rethinkdb.com/>`_
* `ZooKeeper <https://zookeeper.apache.org/>`_
APScheduler also integrates with several common Python frameworks, like:
* `asyncio <http://docs.python.org/3.4/library/asyncio.html>`_ (:pep:`3156`)
* `gevent <http://www.gevent.org/>`_
* `Tornado <http://www.tornadoweb.org/>`_
* `Twisted <http://twistedmatrix.com/>`_
* `Qt <http://qt-project.org/>`_ (using either
`PyQt <http://www.riverbankcomputing.com/software/pyqt/intro>`_ or
`PySide <http://qt-project.org/wiki/PySide>`_)
.. [#f1] The cutoff period for this is also configurable.
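
As a concrete illustration of the interval-based execution described above, here is a
minimal sketch against the 3.6 API vendored in this commit; the job function and the
10-second interval are arbitrary choices, not taken from this project:

.. code:: python

    import time

    from apscheduler.schedulers.background import BackgroundScheduler


    def tick():
        print("tick")


    scheduler = BackgroundScheduler()
    scheduler.add_job(tick, "interval", seconds=10)  # the 'interval' alias
    scheduler.start()

    try:
        while True:
            time.sleep(1)
    except (KeyboardInterrupt, SystemExit):
        scheduler.shutdown()
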
Documentation
-------------
Documentation can be found `here <http://readthedocs.org/docs/apscheduler/en/latest/>`_.
Source
------
The source can be browsed at `Github <https://github.com/agronholm/apscheduler>`_.
Reporting bugs
--------------
A `bug tracker <https://github.com/agronholm/apscheduler/issues>`_ is provided by Github.
Getting help
------------
If you have problems or other questions, you can either:
* Ask in the `apscheduler <https://gitter.im/apscheduler/Lobby>`_ room on Gitter
* Ask on the `APScheduler Google group <http://groups.google.com/group/apscheduler>`_, or
* Ask on `StackOverflow <http://stackoverflow.com/questions/tagged/apscheduler>`_ and tag your
question with the ``apscheduler`` tag

APScheduler-3.6.3.dist-info/RECORD

@@ -1,84 +0,0 @@
APScheduler-3.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
APScheduler-3.6.3.dist-info/LICENSE.txt,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
APScheduler-3.6.3.dist-info/METADATA,sha256=VHah1X4AqMCGgcvEm06M-pAqmNC9q4tOQRbUv3b0Jh0,5398
APScheduler-3.6.3.dist-info/RECORD,,
APScheduler-3.6.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
APScheduler-3.6.3.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
APScheduler-3.6.3.dist-info/entry_points.txt,sha256=7RgkYN_OYyCUQtIGhj-UNcelnIjsNm7nC9rogdMQh3U,1148
APScheduler-3.6.3.dist-info/top_level.txt,sha256=O3oMCWxG-AHkecUoO6Ze7-yYjWrttL95uHO8-RFdYvE,12
apscheduler/__init__.py,sha256=qFEK2ysRBcLiYmm3deyJJ1avUOugaM_nCGHMD42WMBw,380
apscheduler/__pycache__/__init__.cpython-36.pyc,,
apscheduler/__pycache__/events.cpython-36.pyc,,
apscheduler/__pycache__/job.cpython-36.pyc,,
apscheduler/__pycache__/util.cpython-36.pyc,,
apscheduler/events.py,sha256=KRMTDQUS6d2uVnrQvPoz3ZPV5V9XKsCAZLsgx913FFo,3593
apscheduler/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/executors/__pycache__/__init__.cpython-36.pyc,,
apscheduler/executors/__pycache__/asyncio.cpython-36.pyc,,
apscheduler/executors/__pycache__/base.cpython-36.pyc,,
apscheduler/executors/__pycache__/base_py3.cpython-36.pyc,,
apscheduler/executors/__pycache__/debug.cpython-36.pyc,,
apscheduler/executors/__pycache__/gevent.cpython-36.pyc,,
apscheduler/executors/__pycache__/pool.cpython-36.pyc,,
apscheduler/executors/__pycache__/tornado.cpython-36.pyc,,
apscheduler/executors/__pycache__/twisted.cpython-36.pyc,,
apscheduler/executors/asyncio.py,sha256=ji5f6Qm2uGhov-3w52CXHZi8jc5U_gS56lisQylKTBQ,2087
apscheduler/executors/base.py,sha256=hogiMc_t-huw6BMod0HEeY2FhRNmAAUyNNuBHvIX31M,5336
apscheduler/executors/base_py3.py,sha256=s_4siAjBHrr7JZnm64VVow9zyvs2JBc-VRPkPuDeBTI,1775
apscheduler/executors/debug.py,sha256=15_ogSBzl8RRCfBYDnkIV2uMH8cLk1KImYmBa_NVGpc,573
apscheduler/executors/gevent.py,sha256=aulrNmoefyBgrOkH9awRhFiXIDnSCnZ4U0o0_JXIXgc,777
apscheduler/executors/pool.py,sha256=q9TC6KzwWI9tpLNxQhdrKRWFtsN5dmx_Vegu23BV-Sk,1672
apscheduler/executors/tornado.py,sha256=DU75VaQ9R6nBuy8lbPUvDKUgsuJcZqwAvURC5vg3r6w,1780
apscheduler/executors/twisted.py,sha256=bRoU0C4BoVcS6_BjKD5wfUs0IJpGkmLsRAcMH2rJJss,778
apscheduler/job.py,sha256=zT9_GuOpxuxEPVZU38tantw9383tAPRBPoH6dd4uHGA,11088
apscheduler/jobstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/jobstores/__pycache__/__init__.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/base.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/memory.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/mongodb.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/redis.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/rethinkdb.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/sqlalchemy.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/zookeeper.cpython-36.pyc,,
apscheduler/jobstores/base.py,sha256=DXzSW9XscueHZHMvy1qFiG-vYqUl_MMv0n0uBSZWXGo,4523
apscheduler/jobstores/memory.py,sha256=ZxWiKsqfsCHFvac-6X9BztuhnuSxlOYi1dhT6g-pjQo,3655
apscheduler/jobstores/mongodb.py,sha256=e9KNzPFrjiVpiM3iPT_c0ONxZQT70VCF2rDXW0-22zk,5296
apscheduler/jobstores/redis.py,sha256=kjQDIzPXz-Yq976U9HK3aMkcCI_QRLKgTADQWKewtik,5483
apscheduler/jobstores/rethinkdb.py,sha256=k1rSLYJqejuhQxJY3pXwHAQYcpZ1QFJsoQ8n0oEu5MM,5863
apscheduler/jobstores/sqlalchemy.py,sha256=5H5T05cQ2ZtkRuRb8hKkcLzZSQneAT13NMKXby3nzWE,6122
apscheduler/jobstores/zookeeper.py,sha256=BzyqZ08XIDcbu5frQWGmDVEHAEScNxjt8oML6Tty8j8,6406
apscheduler/schedulers/__init__.py,sha256=jM63xA_K7GSToBenhsz-SCcqfhk1pdEVb6ajwoO5Kqg,406
apscheduler/schedulers/__pycache__/__init__.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/asyncio.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/background.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/base.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/blocking.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/gevent.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/qt.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/tornado.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/twisted.cpython-36.pyc,,
apscheduler/schedulers/asyncio.py,sha256=0j0mcDpf-zI_vQHcUCZZtBfEEZEiocEOZ767efIZ5YM,2082
apscheduler/schedulers/background.py,sha256=dGX0T0z6T6HzZHG7njWgp90SFHpetZ4ZBUV2gGOSqoc,1505
apscheduler/schedulers/base.py,sha256=EUGbQ5R2jGA4PEEehU2ASuKVe0SsLqtWESAtTqAJW50,42863
apscheduler/schedulers/blocking.py,sha256=c-5YR-dKn3D82tPt38t50KGPJrAiC852v8ai2Vwanmg,924
apscheduler/schedulers/gevent.py,sha256=csPBvV75FGcboXXsdex6fCD7J54QgBddYNdWj62ZO9g,1031
apscheduler/schedulers/qt.py,sha256=AhHU62ybOOVSD4OhMwoPRRUCoM5cf5q26uD3hPglfnc,1297
apscheduler/schedulers/tornado.py,sha256=D9Vaq3Ee9EFiXa1jDy9tedI048gR_YT_LAFUWqO_uEw,1926
apscheduler/schedulers/twisted.py,sha256=D5EBjjMRtMBxy0_aAURcULAI8Ky2IvCTr9tK9sO1rYk,1844
apscheduler/triggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/triggers/__pycache__/__init__.cpython-36.pyc,,
apscheduler/triggers/__pycache__/base.cpython-36.pyc,,
apscheduler/triggers/__pycache__/combining.cpython-36.pyc,,
apscheduler/triggers/__pycache__/date.cpython-36.pyc,,
apscheduler/triggers/__pycache__/interval.cpython-36.pyc,,
apscheduler/triggers/base.py,sha256=WMo5f2g14fjO5VzpIxFQtk47Z9VEUDDPSxjoPL9FGSQ,1837
apscheduler/triggers/combining.py,sha256=WTEnaEkBHysF1009sCvBaQa99hiy9l5Oz-hHyjy3jv8,3473
apscheduler/triggers/cron/__init__.py,sha256=a8ASzvM7ci-djOI2jIL2XErL6zEx4Wr1012aD1XJw_w,9246
apscheduler/triggers/cron/__pycache__/__init__.cpython-36.pyc,,
apscheduler/triggers/cron/__pycache__/expressions.cpython-36.pyc,,
apscheduler/triggers/cron/__pycache__/fields.cpython-36.pyc,,
apscheduler/triggers/cron/expressions.py,sha256=hu1kq0mKvivIw7U0D0Nnrbuk3q01dCuhZ7SHRPw6qhI,9184
apscheduler/triggers/cron/fields.py,sha256=NWPClh1NgSOpTlJ3sm1TXM_ViC2qJGKWkd_vg0xsw7o,3510
apscheduler/triggers/date.py,sha256=RrfB1PNO9G9e91p1BOf-y_TseVHQQR-KJPhNdPpAHcU,1705
apscheduler/triggers/interval.py,sha256=LiIunGOd96yaiAceG1XGP8eY3JxSyHDWCipVhQWMzDU,4381
apscheduler/util.py,sha256=bQLVYP-RHtjypxol40a_JPT1Ta9BYSlTNdsDTc7dNMU,13963

APScheduler-3.6.3.dist-info/entry_points.txt

@@ -1,24 +0,0 @@
[apscheduler.executors]
asyncio = apscheduler.executors.asyncio:AsyncIOExecutor [asyncio]
debug = apscheduler.executors.debug:DebugExecutor
gevent = apscheduler.executors.gevent:GeventExecutor [gevent]
processpool = apscheduler.executors.pool:ProcessPoolExecutor
threadpool = apscheduler.executors.pool:ThreadPoolExecutor
tornado = apscheduler.executors.tornado:TornadoExecutor [tornado]
twisted = apscheduler.executors.twisted:TwistedExecutor [twisted]
[apscheduler.jobstores]
memory = apscheduler.jobstores.memory:MemoryJobStore
mongodb = apscheduler.jobstores.mongodb:MongoDBJobStore [mongodb]
redis = apscheduler.jobstores.redis:RedisJobStore [redis]
rethinkdb = apscheduler.jobstores.rethinkdb:RethinkDBJobStore [rethinkdb]
sqlalchemy = apscheduler.jobstores.sqlalchemy:SQLAlchemyJobStore [sqlalchemy]
zookeeper = apscheduler.jobstores.zookeeper:ZooKeeperJobStore [zookeeper]
[apscheduler.triggers]
and = apscheduler.triggers.combining:AndTrigger
cron = apscheduler.triggers.cron:CronTrigger
date = apscheduler.triggers.date:DateTrigger
interval = apscheduler.triggers.interval:IntervalTrigger
or = apscheduler.triggers.combining:OrTrigger
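
These entry points are what let the scheduler resolve plain string aliases to trigger,
executor, and job store classes: the "cron" alias below resolves through
[apscheduler.triggers] to apscheduler.triggers.cron:CronTrigger, and constructing the
trigger explicitly is equivalent. A short sketch with an illustrative job function:

.. code:: python

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.cron import CronTrigger


    def nightly():
        print("running the nightly job")


    scheduler = BackgroundScheduler()
    scheduler.add_job(nightly, "cron", hour=3)       # alias form
    scheduler.add_job(nightly, CronTrigger(hour=3))  # explicit, equivalent form
    scheduler.start()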

PyMySQL-0.10.1.dist-info/LICENSE

@@ -1,19 +0,0 @@
Copyright (c) 2010, 2013 PyMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

PyMySQL-0.10.1.dist-info/METADATA

@@ -1,189 +0,0 @@
Metadata-Version: 2.1
Name: PyMySQL
Version: 0.10.1
Summary: Pure Python MySQL Driver
Home-page: https://github.com/PyMySQL/PyMySQL/
Author: yutaka.matsubara
Author-email: yutaka.matsubara@gmail.com
Maintainer: Inada Naoki
Maintainer-email: songofacandy@gmail.com
License: "MIT"
Project-URL: Documentation, https://pymysql.readthedocs.io/
Keywords: MySQL
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Database
Provides-Extra: ed25519
Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
Provides-Extra: rsa
Requires-Dist: cryptography ; extra == 'rsa'
.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
   :target: https://pymysql.readthedocs.io/
   :alt: Documentation Status
.. image:: https://badge.fury.io/py/PyMySQL.svg
   :target: https://badge.fury.io/py/PyMySQL
.. image:: https://travis-ci.org/PyMySQL/PyMySQL.svg?branch=master
   :target: https://travis-ci.org/PyMySQL/PyMySQL
.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
   :target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
.. image:: https://img.shields.io/badge/license-MIT-blue.svg
   :target: https://github.com/PyMySQL/PyMySQL/blob/master/LICENSE
PyMySQL
=======
.. contents:: Table of Contents
   :local:
This package contains a pure-Python MySQL client library, based on `PEP 249`_.
Most public APIs are compatible with mysqlclient and MySQLdb.
NOTE: PyMySQL doesn't support the low-level APIs that `_mysql` provides, like `data_seek`,
`store_result`, and `use_result`. You should use the high-level APIs defined in `PEP 249`_.
But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover
their use case.
.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
Requirements
-------------
* Python -- one of the following:

  - CPython_ : 2.7 and >= 3.5
  - PyPy_ : Latest version

* MySQL Server -- one of the following:

  - MySQL_ >= 5.5
  - MariaDB_ >= 5.5
.. _CPython: https://www.python.org/
.. _PyPy: https://pypy.org/
.. _MySQL: https://www.mysql.com/
.. _MariaDB: https://mariadb.org/
Installation
------------

The package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.

You can install it with pip::

    $ python3 -m pip install PyMySQL

To use "sha256_password" or "caching_sha2_password" for authentication,
you need to install an additional dependency::

    $ python3 -m pip install PyMySQL[rsa]

To use MariaDB's "ed25519" authentication method, you need to install
an additional dependency::

    $ python3 -m pip install PyMySQL[ed25519]
Documentation
-------------
Documentation is available online: https://pymysql.readthedocs.io/
For support, please refer to the `StackOverflow
<https://stackoverflow.com/questions/tagged/pymysql>`_.
Example
-------
The following examples make use of a simple table
.. code:: sql

    CREATE TABLE `users` (
        `id` int(11) NOT NULL AUTO_INCREMENT,
        `email` varchar(255) COLLATE utf8_bin NOT NULL,
        `password` varchar(255) COLLATE utf8_bin NOT NULL,
        PRIMARY KEY (`id`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin
    AUTO_INCREMENT=1 ;
.. code:: python

    import pymysql.cursors

    # Connect to the database
    connection = pymysql.connect(host='localhost',
                                 user='user',
                                 password='passwd',
                                 db='db',
                                 charset='utf8mb4',
                                 cursorclass=pymysql.cursors.DictCursor)

    try:
        with connection.cursor() as cursor:
            # Create a new record
            sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
            cursor.execute(sql, ('webmaster@python.org', 'very-secret'))

        # connection is not autocommit by default. So you must commit to save
        # your changes.
        connection.commit()

        with connection.cursor() as cursor:
            # Read a single record
            sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
            cursor.execute(sql, ('webmaster@python.org',))
            result = cursor.fetchone()
            print(result)
    finally:
        connection.close()
This example will print:
.. code:: python

    {'password': 'very-secret', 'id': 1}
Resources
---------
* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
* MySQL Reference Manuals: https://dev.mysql.com/doc/
* MySQL client/server protocol:
https://dev.mysql.com/doc/internals/en/client-server-protocol.html
* "Connector" channel in MySQL Community Slack:
https://lefred.be/mysql-community-on-slack/
* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
License
-------
PyMySQL is released under the MIT License. See LICENSE for more information.

PyMySQL-0.10.1.dist-info/RECORD

@@ -1,50 +0,0 @@
PyMySQL-0.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyMySQL-0.10.1.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
PyMySQL-0.10.1.dist-info/METADATA,sha256=SP0KPSfmgNJ2ujhGRrRRiWOodzv62BfYnbY1OXX3DTI,5481
PyMySQL-0.10.1.dist-info/RECORD,,
PyMySQL-0.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyMySQL-0.10.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
PyMySQL-0.10.1.dist-info/pbr.json,sha256=Lqvh8-9N7qS6SLUlEJ5GDLWioQcvR9n1WWjMEfJ5mv8,47
PyMySQL-0.10.1.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
pymysql/__init__.py,sha256=KDHcmnEoEDMmRPNO5JFcxb7lsypDmwGn5Td-f-X6xDY,4733
pymysql/__pycache__/__init__.cpython-36.pyc,,
pymysql/__pycache__/_auth.cpython-36.pyc,,
pymysql/__pycache__/_compat.cpython-36.pyc,,
pymysql/__pycache__/_socketio.cpython-36.pyc,,
pymysql/__pycache__/charset.cpython-36.pyc,,
pymysql/__pycache__/connections.cpython-36.pyc,,
pymysql/__pycache__/converters.cpython-36.pyc,,
pymysql/__pycache__/cursors.cpython-36.pyc,,
pymysql/__pycache__/err.cpython-36.pyc,,
pymysql/__pycache__/optionfile.cpython-36.pyc,,
pymysql/__pycache__/protocol.cpython-36.pyc,,
pymysql/__pycache__/times.cpython-36.pyc,,
pymysql/__pycache__/util.cpython-36.pyc,,
pymysql/_auth.py,sha256=pEeHBpQ15h2wfj6k7np6LVHVz34whEXSs5KrqeYtDGw,9564
pymysql/_compat.py,sha256=DSxMV2ib-rhIuQIKiXX44yds_0bN2M_RddfYQiSdB6U,481
pymysql/_socketio.py,sha256=smsw4wudNM4CKl85uis8QHfjDhz2iXQRvl8QV4TmB1w,4049
pymysql/charset.py,sha256=zaaRbEQrFiE0iCd3AB52WJY9VqVxQcp8sYcoPDlPdWI,10308
pymysql/connections.py,sha256=xR0gWxvQ6IxBcFhY9JPmYRCcvs6xSnRKUq-DZ6MpfNY,49010
pymysql/constants/CLIENT.py,sha256=cPMxnQQbBG6xqaEDwqzggTfWIuJQ1Oy7HrIgw_vgpo4,853
pymysql/constants/COMMAND.py,sha256=ypGdEUmi8m9cdBZ3rDU6mb7bsIyu9ldCDvc4pNF7V70,680
pymysql/constants/CR.py,sha256=5ojVkbisyw7Qo_cTNpnHYvV6xHRZXK39Qqv8tjGbIbg,2228
pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
pymysql/constants/FIELD_TYPE.py,sha256=yHZLSyQewMxTDx4PLrI1H_iwH2FnsrgBZFa56UG2HiQ,372
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
pymysql/constants/SERVER_STATUS.py,sha256=KogVCOrV-S5aAFwyVKeKgua13nwdt1WFyHagjCZbcpM,334
pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pymysql/constants/__pycache__/CLIENT.cpython-36.pyc,,
pymysql/constants/__pycache__/COMMAND.cpython-36.pyc,,
pymysql/constants/__pycache__/CR.cpython-36.pyc,,
pymysql/constants/__pycache__/ER.cpython-36.pyc,,
pymysql/constants/__pycache__/FIELD_TYPE.cpython-36.pyc,,
pymysql/constants/__pycache__/FLAG.cpython-36.pyc,,
pymysql/constants/__pycache__/SERVER_STATUS.cpython-36.pyc,,
pymysql/constants/__pycache__/__init__.cpython-36.pyc,,
pymysql/converters.py,sha256=kUT2KQdkqNTuSxzURVnQKS1ZcatoFTUfYe5b5QSJuRI,11055
pymysql/cursors.py,sha256=eiP_oTDi1MM5EYLHoecwbv5BXWJ1qEjfK8Uy3SjGEcs,16250
pymysql/err.py,sha256=Vdrt2rVaSePVlB_uy0JNoeN6zYBt0_mM1UFDighLgNM,3734
pymysql/optionfile.py,sha256=4yW8A7aAR2Aild7ibLOCzIlTCcYd90PtR8LRGJSZs8o,658
pymysql/protocol.py,sha256=9hAfVK-g4i53gHMoGj9QrPApywMYVM8oxGAuKb_-PXo,12071
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
pymysql/util.py,sha256=jKPts8cOMIXDndjsV3783VW-iq9uMxETWqfHP6Bd-Zo,180

PyMySQL-0.10.1.dist-info/WHEEL

@@ -1,6 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

PyMySQL-0.10.1.dist-info/pbr.json

@@ -1 +0,0 @@
{"is_release": false, "git_version": "08bac52"}

SQLAlchemy-1.3.22.dist-info/LICENSE

@@ -1,19 +0,0 @@
Copyright 2005-2020 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

SQLAlchemy-1.3.22.dist-info/METADATA

@@ -1,210 +0,0 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 1.3.22
Summary: Database Abstraction Library
Home-page: http://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Classifier: Operating System :: OS Independent
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mysql
Requires-Dist: mysqlclient ; extra == 'mysql'
Provides-Extra: oracle
Requires-Dist: cx-oracle ; extra == 'oracle'
Provides-Extra: postgresql
Requires-Dist: psycopg2 ; extra == 'postgresql'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
   :target: https://pypi.org/project/sqlalchemy
   :alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
   :target: https://pypi.org/project/sqlalchemy
   :alt: PyPI - Python Version
.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
   :target: https://pypi.org/project/sqlalchemy
   :alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-
initiated decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
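
To make the Core/ORM separation above concrete, here is a small sketch of the Core
expression language against the 1.3 API vendored in this commit; the in-memory SQLite
engine and toy table are illustrative only:

.. code:: python

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine, select,
    )

    # Illustrative engine and table; the inserted value travels as a bound
    # parameter rather than being rendered into the SQL string.
    engine = create_engine("sqlite://")
    metadata = MetaData()
    users = Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("email", String(255)),
    )
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(users.insert().values(email="a@example.com"))
        print(conn.execute(select([users.c.email])).fetchall())
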
Documentation
-------------
Latest documentation is at:
http://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <http://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<http://www.opensource.org/licenses/mit-license.php>`_.

SQLAlchemy-1.3.22.dist-info/RECORD

@@ -1,408 +0,0 @@
SQLAlchemy-1.3.22.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-1.3.22.dist-info/LICENSE,sha256=JYpB5k2IR1Y8Ym9F--J9Vsa25hcfRa01rPMRLUZR7eM,1119
SQLAlchemy-1.3.22.dist-info/METADATA,sha256=CoXBOklkuv5C54I0lpgAWD3eo1WsfsnGvz59qYFRGlo,7789
SQLAlchemy-1.3.22.dist-info/RECORD,,
SQLAlchemy-1.3.22.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-1.3.22.dist-info/WHEEL,sha256=epucrC2yyYTysDCMzXuz8eGMTMKryzRfNOvMGdslbjc,101
SQLAlchemy-1.3.22.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=oGbsCCcNv7nfI2dHYxm9SnD2SIFGeMKQq-u6hwQuTxs,4940
sqlalchemy/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/__pycache__/events.cpython-36.pyc,,
sqlalchemy/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/__pycache__/inspection.cpython-36.pyc,,
sqlalchemy/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/__pycache__/log.cpython-36.pyc,,
sqlalchemy/__pycache__/processors.cpython-36.pyc,,
sqlalchemy/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/__pycache__/types.cpython-36.pyc,,
sqlalchemy/connectors/__init__.py,sha256=D7659JWloMtbnvSb-MkRJjs8-Ecgwa7uI2XQbmMfLYg,288
sqlalchemy/connectors/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/zxJDBC.cpython-36.pyc,,
sqlalchemy/connectors/mxodbc.py,sha256=KNOaaXrHzFfIEUgbXDyslCpNfBVQbCkUIPO8eCSRIQA,5504
sqlalchemy/connectors/pyodbc.py,sha256=x0-C7TxZAbM6NAiBD-wazJqi9GSqqCk9F0aex76Nd4s,6167
sqlalchemy/connectors/zxJDBC.py,sha256=vuWKN_K8DDLp_lBwzeIJkexgXJQrwtqDdeZqabQu_5I,1946
sqlalchemy/cprocessors.cp36-win_amd64.pyd,sha256=avUe-wnlnSJS5cmuR5ucOAAoSa5z57mi9sBBiUnPyJ8,17408
sqlalchemy/cresultproxy.cp36-win_amd64.pyd,sha256=XOvcyT-TdwxcII6txz_Ln9jkyqBPPVbanJx47Wkd2Rw,18432
sqlalchemy/cutils.cp36-win_amd64.pyd,sha256=sZzj-sIrsBWAPiw_xHIJiIes1s5FDoY-t8kbTZ6Iq1s,11264
sqlalchemy/databases/__init__.py,sha256=6fOESInuF0zWrGeomJyeO3eZ27yx0bvPfNLDwMFQuyo,851
sqlalchemy/databases/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/__init__.py,sha256=tuHFeqV6ldzs8J_AwgVb19BNzvN1KkbK1kQqghXgtZs,1981
sqlalchemy/dialects/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__init__.py,sha256=xX-oB-TsUHwwQcz3_lDpUUpN1hK0aoYN7aFs3yaYaug,1193
sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/base.py,sha256=fAKJL4xr0XnWPsuYg4OYWMwVaoigV4T8ia5L8zSWvV8,31278
sqlalchemy/dialects/firebird/fdb.py,sha256=962JWKObYNHp_gm5Yp6rw1kGhKyTY9xmftfBqkyAHZA,4189
sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=Nc90QW_ieIr7UXf3niFS0s1T9rtHCMcrVYTu9UeyY50,6638
sqlalchemy/dialects/mssql/__init__.py,sha256=0DbqS4ifYplCCsxj00PKZKIK945fbTeXQPatTkFek_w,1897
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/adodbapi.py,sha256=2iTqjUMZhGX2EcDRvxXKcDJPvrB0H-1mZOtmahuDIvA,2809
sqlalchemy/dialects/mssql/base.py,sha256=tmNAFdUg2UIMn9WklQx2dRtfwC2eIix_8z52XRfK0Zs,93907
sqlalchemy/dialects/mssql/information_schema.py,sha256=VlG-2McJ1m81k5Q5FWNGXLcIJ_VZ333bGNy_vTZ4wnk,5814
sqlalchemy/dialects/mssql/mxodbc.py,sha256=DRqhxaMNvnaW8PeSqx9QikkoLaYznIapdHUdJGXz5GQ,4756
sqlalchemy/dialects/mssql/provision.py,sha256=GcMMMQYeRKDYV5YOThDaytZ6Z-zTThFkhuOOzXfloxM,2865
sqlalchemy/dialects/mssql/pymssql.py,sha256=gk7O2RWn3ShkIzjnbNWuX_PAO3Vp_gh5M_MRFK7juiE,4921
sqlalchemy/dialects/mssql/pyodbc.py,sha256=TMl11g3ZIS2N6iiPKrw6WZp-HuL-5QyXOhGnxEn_c44,16782
sqlalchemy/dialects/mssql/zxjdbc.py,sha256=ZLOq6QNPV_ljOFo5uDMq99MhqX1yqUNtTAQppc4V4IA,2382
sqlalchemy/dialects/mysql/__init__.py,sha256=X3KjSF_mLNSiCo_bB2GdfZ-0337jskSm6UxIUnrOo1g,2155
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/mysql/base.py,sha256=qvN_IrLrStClGFxq1iMAEGUI8UTglvAkyp9enH4k-fg,109610
sqlalchemy/dialects/mysql/cymysql.py,sha256=smZp4gH5Js-GWi5QeW0mV76CQoqVTteAr_FZwjcGE7k,2326
sqlalchemy/dialects/mysql/dml.py,sha256=iap_GsuLA_KVdav_aOLPP9tHmXo3gP7IIoBhCRcZlTE,4903
sqlalchemy/dialects/mysql/enumerated.py,sha256=OKO-xo7RBVSvcJxC5tTTJqbu0KzCDmGL-5xlvTRejEU,11621
sqlalchemy/dialects/mysql/gaerdbms.py,sha256=FNNG3WAK_R92wLxgfpixtV1cZ-WawSalDDxnvL8LnTQ,3477
sqlalchemy/dialects/mysql/json.py,sha256=tP1xhztSdXybTiI-MjgNJGt6Y3xDZM_ZmGn2n3RcyQY,2121
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=3vawcKdZ5CPWDb_nacbanc5V9VKCwDdFyy-XCjo5AHg,8135
sqlalchemy/dialects/mysql/mysqldb.py,sha256=1C4rq0pMv_3HuoVN5CLw5pagZD4YtxDt5TFRfu4M4_Q,8651
sqlalchemy/dialects/mysql/oursql.py,sha256=APJcOSq8P6ZVkWcPzeVo0021AGr6LxlqMEMfJpEX7EA,8348
sqlalchemy/dialects/mysql/provision.py,sha256=8TsTjBn0mKOg5KtxoSxS5RjWnTAZmx2D-QsMtWAhBRo,1309
sqlalchemy/dialects/mysql/pymysql.py,sha256=qHZNSGjhPu6RXnyjZA_jIwSgDVnr9FGqcx-8gLvDsqE,2525
sqlalchemy/dialects/mysql/pyodbc.py,sha256=J_SiU7l5f0MOu3ZE9nm5GuXhNV6kPrBHKvkAZUAsILU,3580
sqlalchemy/dialects/mysql/reflection.py,sha256=YXTnwNtLys9BOq395d4rGj8CcpJT6ME9P2Bxu6WjSf4,18818
sqlalchemy/dialects/mysql/types.py,sha256=L1qrnZLLfwvemhs5c7dqckA6058miMFphhgxKYRhlVs,25362
sqlalchemy/dialects/mysql/zxjdbc.py,sha256=FcP6Wo-oNbhJc2uMS7JK_MGz_OlsOoe4vu_EIUbSD0I,4090
sqlalchemy/dialects/oracle/__init__.py,sha256=BV4VRUkPudVQHxKWGAWF0GJpFSy8D2R11i_ITxD2NMg,1316
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=hu_YwI1IFLXvq7OAQGWO55RlEeCUdTT_9mTGVQggwts,80046
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=RptY_Wmc5SaurrljUTTMd6i2-RVwSqk-7WVteMqjYX0,48218
sqlalchemy/dialects/oracle/provision.py,sha256=C6d0eGCJnrEp0ttDtBapmGiG3vVT4QDP1H2nF5KlLIM,3972
sqlalchemy/dialects/oracle/zxjdbc.py,sha256=hZEP9oglXP7sYjCwsNkyDJ3HWNmxZC4aclK1dfJfv64,8461
sqlalchemy/dialects/postgresql/__init__.py,sha256=GCrneOsxBcbkmiJMNMiKJJGfcJqMkKYKJLrme8qFTX8,2577
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/array.py,sha256=33Heu13kSDZlDfJqPOCXNKt9KzIZ276vhCWecNSnovM,12428
sqlalchemy/dialects/postgresql/base.py,sha256=y14qLd2rYBLQLeSkEj5oav9fYFfK6KqZbEIhyAFWAcI,131828
sqlalchemy/dialects/postgresql/dml.py,sha256=fYDe7w79jgLknblFwGr3arVstq-daaqftAUyfGK0zkk,8022
sqlalchemy/dialects/postgresql/ext.py,sha256=ae0lgzCkCMzz_7MSAQNyhyK0G9XyGdNAz4K9q84O2XE,7768
sqlalchemy/dialects/postgresql/hstore.py,sha256=8dePOlF-7k8i4ixfNMzE4LLc227jdEHNTFCrpkMcBAY,12861
sqlalchemy/dialects/postgresql/json.py,sha256=UB-eM0FK965v5sJTG3kFfqbt4_zj9Z7wsEapUNhgkhY,10418
sqlalchemy/dialects/postgresql/pg8000.py,sha256=EPiyPEdFZ5xeF-3lN9bEuxd1KpPUxIRphR44rMrkU4A,10054
sqlalchemy/dialects/postgresql/provision.py,sha256=zZUgATRaS2qMCL2vpzX5KepX9Adu5aWVyHsSGK-b7_4,2072
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=wAWYmJMuaBoBfW3Qi2g5K8uMZZkxZpMJdy3d8Eg-76M,38148
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=Gy9Al5AXnVUBgAwZkWbrARiSHs6V9tcBQjXwjB7Qecw,1716
sqlalchemy/dialects/postgresql/pygresql.py,sha256=snx_vIOVR_P3sUf0IMD190dPwO8nah4oFY-Dg36Blto,8395
sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=ibku8Pw0A2rAYB_O_TMzi2gcW1bMI4l8txJ2AZTG3dY,3024
sqlalchemy/dialects/postgresql/ranges.py,sha256=xLHMLwLuV9WnUSg-ncuKyQJw2qedxoZ9Sj5BU9t5O6M,4757
sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=qhr2-xaLcfoeCZpRyl5PCo6SNabwDE9v2q1aMw3BhkM,1463
sqlalchemy/dialects/sqlite/__init__.py,sha256=L-3xDyZP0fj3Ri0qYs6RAHoLfceMTIzRsYbJ3b1vBWw,1093
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/base.py,sha256=7zaK3ww7r-drH9PBkOHhrcjWanjTWtTspnD7o46VRB8,77282
sqlalchemy/dialects/sqlite/json.py,sha256=jTzeDsutf0mCe46kFx26J6E5O_Tmct0zpoSxiSkGbIw,2370
sqlalchemy/dialects/sqlite/provision.py,sha256=p3tIZ9fFsv99CNkpXppPLn9VbcjtiH5IJsil816G3LM,2666
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=7Imed4zA9kRECvn418aaN8Cn26k--QCmS7fjiNQ0cDI,4830
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=fhLPCRmosgCnVv1anLp5AL6lu0VXnRhRoSnXqGcm3oA,21511
sqlalchemy/dialects/sybase/__init__.py,sha256=Yi5Vq16WZ2lqih1PTuKTruSXjFvgFowYPY2QgGr-kiM,1430
sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-36.pyc,,
sqlalchemy/dialects/sybase/base.py,sha256=0KToWw8S_8wSDB84i3qzMxvIcpw2qXbA0EO2zju-As0,32873
sqlalchemy/dialects/sybase/mxodbc.py,sha256=KFz6sa0vd7M9Jp-agumMUT734PqaMPoGP-2T5gpUjQU,936
sqlalchemy/dialects/sybase/pyodbc.py,sha256=Ket-YHbBAH9z-ORWTwvv8z9LqlRbm9EVznfNXukAjDA,2205
sqlalchemy/dialects/sybase/pysybase.py,sha256=gyaL1P3RDDPvlZ7TMT8sifQasAwwntRd8lf0eQizITU,3417
sqlalchemy/engine/__init__.py,sha256=hLxpMJZ2W-DK-I_gEEiME-bWW_NVu-RE1ny_JFE5Mp4,25632
sqlalchemy/engine/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/threadlocal.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-36.pyc,,
sqlalchemy/engine/base.py,sha256=SSEsf4G63VTAEcsicx4paVu3j6m8nfKVFbSnf8FAiIE,89571
sqlalchemy/engine/default.py,sha256=BCUfi29CXcnJkOhb9qGr8sS5yYheR4tHqnO35GTTWbQ,57293
sqlalchemy/engine/interfaces.py,sha256=3IJ6SPjdscyYZa5mywgm7cInyQ_xPWb0tCs_1UA3idU,49990
sqlalchemy/engine/reflection.py,sha256=csWPvH38f-a2YlpwoVEicWsMSZBlIanwnpkm6kB8eUg,35748
sqlalchemy/engine/result.py,sha256=qA9B6iIRgrst-p3Us3mgnZKmxByapF2SHDCszJ2w60c,56319
sqlalchemy/engine/strategies.py,sha256=UQ1gUY1Jim9EPJ1JwSVhEE0oCPuF1kbTamAN8Doe33Y,10166
sqlalchemy/engine/threadlocal.py,sha256=RZ31qQ34oFGTzJ-0k-vxW-hwlrQhPUfxzAJnG-ODjmw,4925
sqlalchemy/engine/url.py,sha256=bQzV-5JowgOQeqSmNYH03_pdiwMnXLABqvS5PjpCh_8,9755
sqlalchemy/engine/util.py,sha256=Cf2yL0fLIJlMG93ZGV1CFKtPoNOfJIK1F8x2AUwKzWk,2501
sqlalchemy/event/__init__.py,sha256=Ej6lUzFJg39963UAkSMEPP1OFSgxS0VmqqYVxEfhlqo,613
sqlalchemy/event/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/event/__pycache__/api.cpython-36.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-36.pyc,,
sqlalchemy/event/__pycache__/base.cpython-36.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-36.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-36.pyc,,
sqlalchemy/event/api.py,sha256=ycQNjI6kZHPykchMU-k6WGMwIttx2wCUUo1JEq7MYUI,7296
sqlalchemy/event/attr.py,sha256=kqVDB8bPPLGNmKkmV7A3EF7dMj0liFKcxZFW9d6waDE,14295
sqlalchemy/event/base.py,sha256=SGkPPaXEd40H8F_-8pU496yWmQudhi-_cSz46PHNOk4,10047
sqlalchemy/event/legacy.py,sha256=3QrQ0rPxLGKzwo4FeUtXvFbRxANFhJBRYj_kVGkQmz4,6081
sqlalchemy/event/registry.py,sha256=vLFXNVdxdxAoVxb60ZPbn0H39S8pAiSlRy-iAuEaGX0,8510
sqlalchemy/events.py,sha256=RT3Bhu_Es5cRSLjX1s9-lbFvnjpkWcUxk2Xw67xOdaw,54796
sqlalchemy/exc.py,sha256=_tgDgu1XS3hS7zo5hDSXBDG81E52Jgxsfmt8jMQm-4s,18441
sqlalchemy/ext/__init__.py,sha256=F-uIFRUKjn1LdWPFjy8E6AZLERfRBQNiVWQjZf1FWxM,333
sqlalchemy/ext/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-36.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=rZ40Wg8uU6VUFj8BLp-DAi_riPu5d_OV3vl5xX1IJ74,51583
sqlalchemy/ext/automap.py,sha256=c1q8-DcNvK9bb_fXDQXQkLok43gbfw_Dd2BmrYlDvls,43294
sqlalchemy/ext/baked.py,sha256=tLUjs7aR5cbHpn6mD7Zu3O5k1JPDGfd3gX6WzbltjfY,22671
sqlalchemy/ext/compiler.py,sha256=WTLu3SZaabXzLGnHju2Sch0Wjm38qRhl0GavhAdvLok,17633
sqlalchemy/ext/declarative/__init__.py,sha256=kBi5Tv7YWkjof4BOMSU7k0B8pt-xiS2FGPlOm-wHQG4,935
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/api.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/base.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-36.pyc,,
sqlalchemy/ext/declarative/api.py,sha256=1SRoRJiA0-dRE5o73hyp9ArStxImOVBjzfRna2E0t4U,29577
sqlalchemy/ext/declarative/base.py,sha256=PeL5Moulup9ma_W-4mfO0iqPjZstyeyuEBCYn7G6HRY,32945
sqlalchemy/ext/declarative/clsregistry.py,sha256=eL0b5GmyAVGMW8Xv-DGim5o6ZJkU_aKGFDubGehBpGE,12967
sqlalchemy/ext/horizontal_shard.py,sha256=t42Z4MwSvA00NbKwHlBea1JLmS4306wTSrov4tVRFG8,9399
sqlalchemy/ext/hybrid.py,sha256=kSoKBMxQSlQqHzK3SNbAzdaqYeMfdj7yYZnIdT-saPk,41561
sqlalchemy/ext/indexable.py,sha256=xAHcNTft78MteT8W7L4fwn2K9Bm6NZkcgswW3baVJsc,11606
sqlalchemy/ext/instrumentation.py,sha256=rDk7cBPATJLzVTXgk4eiMMZOfhNXqKN0ej5uGB3L8J0,14787
sqlalchemy/ext/mutable.py,sha256=RzKcEPcxGLlMICwllsnBXN9LMqdm8UhUG8qUiYYwcvQ,32766
sqlalchemy/ext/orderinglist.py,sha256=rknSocVvlnKJaajrn7YBzHf0X_vtonTtK3P7TpP5H5M,14288
sqlalchemy/ext/serializer.py,sha256=nd-Vdqr0ym0VT0QXBZzRJWB3q0hcfOpmDVcT86Lwlik,5957
sqlalchemy/inspection.py,sha256=iQWjerm0DQkFf9RpkwSRocMSUAVdIQrje75ez5vB09s,3123
sqlalchemy/interfaces.py,sha256=1H48NQh38EfFgZwFC7c7slmajPOrWgDRGu8G2gfIr-A,13103
sqlalchemy/log.py,sha256=2aS2vQI6b9k5JTqVGuGxpYM3AuzNo7lTNQpdTqacY2U,6927
sqlalchemy/orm/__init__.py,sha256=EASu-ndJDwkuheDg3Hsdg5p1I1QlHLoRXXq9cDJtX-U,10245
sqlalchemy/orm/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-36.pyc,,
sqlalchemy/orm/attributes.py,sha256=Q88uEiT5SXog1jCUd6axkwzyzvdfxmqfBMrlamwKgNY,70139
sqlalchemy/orm/base.py,sha256=p44V2Li8iWV691USS9JJ9ZMvKO4NRXL9_EWYzOwPqPY,15882
sqlalchemy/orm/collections.py,sha256=CGoPDFpFoMJhpYiXcSPiBvsWpYMWki7cykSv27IqJAg,54505
sqlalchemy/orm/dependency.py,sha256=8rKv5Ln_moi3bl-NXuGOjr-4lECScIufxVSkyDxcc3o,47840
sqlalchemy/orm/deprecated_interfaces.py,sha256=MhCVgxy3JVfkUQ5VjduzJETg8Uvol7PhTA4ZINICK50,21331
sqlalchemy/orm/descriptor_props.py,sha256=slkon0LbI02QwUQrnlWJUJOuNzbvIs93BrAkFDXS3DY,29181
sqlalchemy/orm/dynamic.py,sha256=c_NBkzdFUel0vxXAjzFKrv6CKloxCEb9YKJEE020yGE,15117
sqlalchemy/orm/evaluator.py,sha256=oJOeyfowPK9tx90oFnN3oT2Lbpm-0zBC6jD3i70q_Ug,5633
sqlalchemy/orm/events.py,sha256=IfmKrDZ5iF2MvPpgECjNNG4mYachg7uxY2TEoYLIeLQ,107363
sqlalchemy/orm/exc.py,sha256=453LebxTLDMC4yQEXeAmehYmcGExwyjK0L4oKJK91GE,6822
sqlalchemy/orm/identity.py,sha256=SS1rHYGwGYBPi2_RxTNC1pKYAmM_FGDTFfq9vPI9qco,10802
sqlalchemy/orm/instrumentation.py,sha256=z-2huzGB_BGe2X4Qfu7rUKvYTCqi2YtMiBd4xLkoEuw,19178
sqlalchemy/orm/interfaces.py,sha256=AcMKh9IXf6P1id9cyqwo_eYOnaVBpLG-eJLr5XJy-jc,26626
sqlalchemy/orm/loading.py,sha256=FfYKa6XjcVj0sqwSEkQs9VziMA54E-DFJm5CCY8cN_0,34855
sqlalchemy/orm/mapper.py,sha256=PPTmTWVVIKhhEUzXhHJVuKtn0iCnSk8KCXKdZpqqeoQ,134392
sqlalchemy/orm/path_registry.py,sha256=kS6ZP7c-gHt0M17RhZImZirJctAohQDHYNVZEcxxGWI,14203
sqlalchemy/orm/persistence.py,sha256=vk8OJY3SONKx7Ylu_lT7ZU-jIASNKuhUXaUIf4NVmcA,68073
sqlalchemy/orm/properties.py,sha256=H9883ECMBSN6xMi411nJZuKH9iyYX2IVAnInE7luVj0,13069
sqlalchemy/orm/query.py,sha256=g-uvTtUOfaVUYk3xjie6rZ2HtOV1lwBtny0E-UlOQiY,186442
sqlalchemy/orm/relationships.py,sha256=YYRycNoWNhWsAOwnwJR0zQPtLneXYSiQ-f7XjcVJVaI,141581
sqlalchemy/orm/scoping.py,sha256=-8rmidcl_VcY6_F1FLM0UHl23IgliQMhfn-0P-VcWGU,6619
sqlalchemy/orm/session.py,sha256=g2f60o8Oy0QaS-j7dpmjcx76llr3L8K9_KxxmBLWQJ8,135243
sqlalchemy/orm/state.py,sha256=BKNajH3NFxoM9CXKdV9fttpCZOJ1BUpCUN9eECuIu00,31763
sqlalchemy/orm/strategies.py,sha256=U0QkBemiOFotZI-JhgbDilFOa9_6eePXj-lTs9CjnQU,89843
sqlalchemy/orm/strategy_options.py,sha256=MPkIUJcr079EHRTm_UlCTYhVLIenh-ztwi8msW2CMf4,59567
sqlalchemy/orm/sync.py,sha256=C6cVB2X6Roqv2HrRIiFOBRpnEgr6oEfGNS8oFCmf4-A,5990
sqlalchemy/orm/unitofwork.py,sha256=sDcZMx9xYDfRi2PZo9-REyribKxWwhpeMm7d9clG7Qk,25474
sqlalchemy/orm/util.py,sha256=HYD2mztX2FqmKUd3kskUGjRlEEAoovcowAZSBMk_7Gg,46790
sqlalchemy/pool/__init__.py,sha256=WCEmpV5aEJTcftwapYTqx9TfPHwTx8EQ2j9Gs2wyoMY,1535
sqlalchemy/pool/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-36.pyc,,
sqlalchemy/pool/base.py,sha256=YU_pCjvSI9JjRawKXFSB2-FgJYqQ_CO1xbM6Kd5VVoA,37557
sqlalchemy/pool/dbapi_proxy.py,sha256=YEw0RXKNFbbvgaHwR3ZXwrPowmk1zMkCe52jdSvI9bk,4470
sqlalchemy/pool/impl.py,sha256=j-DCjlJsBWKHuco7749ukokFeSmULFMAgwVwbc1p1wc,15446
sqlalchemy/processors.py,sha256=olrOO9fpgp87fMHB2-pJle-CI4QSuaLZcFM7Xnop2Pk,5920
sqlalchemy/schema.py,sha256=Ff6Tx0-UY35XGpBPuBI6RrutTiv4TgY28-TdCMcCrPA,2526
sqlalchemy/sql/__init__.py,sha256=ued_W8_shnIaN3mFS2Il_GMLD1xmWbyCXHQPYJf8YNc,3895
sqlalchemy/sql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-36.pyc,,
sqlalchemy/sql/annotation.py,sha256=Rc20evisG_XwBNK_uov9VOFOdO3nYLFXseihjbWNGvs,6935
sqlalchemy/sql/base.py,sha256=opPc1CggSMq8zku3JbLIPOIDNB00D6be2c7ulW8-k1Q,22390
sqlalchemy/sql/compiler.py,sha256=yFuSjpT0qScGO7YoJws1LcTkpoM_hWoxlKi-Bp2npoE,132128
sqlalchemy/sql/crud.py,sha256=abF4fOtpaauGSVyXjimnHNzjTMMnzJ5rqdHjweXlzhI,26690
sqlalchemy/sql/ddl.py,sha256=bJ_-gdduAOibfpXVA-oNuqX2Txmxcc1lGAmTDk_fnpM,42687
sqlalchemy/sql/default_comparator.py,sha256=O0k9mS7avmu7BKeB5bhyw5TVJIAyowaH28ZqJUtEPNE,12609
sqlalchemy/sql/dml.py,sha256=4P0XqKiKsfJtj2Pxn_4xcQEsOvlj1eoPpUpyesCfqL0,36624
sqlalchemy/sql/elements.py,sha256=deWSv8q9N4gtWVxkSpxBNwH5xkxgB10kiLIL6se2ZIA,166190
sqlalchemy/sql/expression.py,sha256=5MZNgs6Mv5V0K-f9gyVIsh6eUMI61O1qdZxJCJdu1BY,9476
sqlalchemy/sql/functions.py,sha256=nUdRQn90JSnX5qUC9__JTDtcohXyO_UH6w111rRWvpM,37002
sqlalchemy/sql/naming.py,sha256=p-BVZd3W_KIcKb7KksYWP4jBf3c1lgbfoiEd8K5t_qI,6071
sqlalchemy/sql/operators.py,sha256=-D0mXfiROxhMh4uFFbhLf5qAyyURaqOclZD8OYk8HgQ,44066
sqlalchemy/sql/schema.py,sha256=FZU_KqRx6DqMJBNU2Kx33_UNYYiTAxymBN6JmiKA8v0,180818
sqlalchemy/sql/selectable.py,sha256=egPfU9Lo5sLFiG2WXzSwTfeqVxaJsZkT5BksMKnSHPU,143622
sqlalchemy/sql/sqltypes.py,sha256=kC4gxQfOjPlGu5b2Q5O0ndmLAT6dGQnwNxueLbFYchI,104685
sqlalchemy/sql/type_api.py,sha256=L4Q6hDHDa3jfo4gsZMOXU9pY-lf4qtwcj_2EEyyriiM,53731
sqlalchemy/sql/util.py,sha256=0yHwgYlOJ6xmqjmsivD9BZWplq4w-ECtNvxRN2Fhidg,31006
sqlalchemy/sql/visitors.py,sha256=npwoEq-h8u8LAAN_OIX9oBf0Htb35pORff1xwA0m96Q,16426
sqlalchemy/testing/__init__.py,sha256=F6cKUmiLxkeYOlBjI4D8tKbVF12sJ1R5x5vKbQa7yZM,2941
sqlalchemy/testing/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/fixtures.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/mock.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/replay_fixture.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-36.pyc,,
sqlalchemy/testing/assertions.py,sha256=vUPrnQv_HmJ2QZhAPkD9Vky25SgCyknIizSk7e3VF_g,22978
sqlalchemy/testing/assertsql.py,sha256=l-f0MkUFKCnwQ1cj8g9WqXT-tjPIm0sLNUS0IYgMJX0,14009
sqlalchemy/testing/config.py,sha256=vOKsy7RCVgUsjUNlayfOZ0AaLBNm_suTTOY1q721714,5693
sqlalchemy/testing/engines.py,sha256=SxG2GXuYZgE-pSwKcs5CPvenzyff-qZVjtWz3IQiAak,10910
sqlalchemy/testing/entities.py,sha256=2LJdshRUYWMvazvgUrAcGkTeWEnNoiI_y1NSKcfsRSw,3312
sqlalchemy/testing/exclusions.py,sha256=GuPlFXj7NxPDJfrYbPGfuOdRYFfO28Tp945gH42FnXY,13490
sqlalchemy/testing/fixtures.py,sha256=9jzKNlfPsD3P3UbQmPaTzPHLkuoAhXo5XNmCooT32hU,15556
sqlalchemy/testing/mock.py,sha256=iomOaukRMIAdt2Y3TxUcmvVH-1SMwkwwIJJZvTxyblM,925
sqlalchemy/testing/pickleable.py,sha256=Mnym0utxflQ0Htlx5hbPxTA9t_RFpuhB_zBFNuk7q-s,2834
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-36.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=q6FSeGdWtNutyrrwWyDdUXvVDPwI7owgiZ7sXjZUw4I,1513
sqlalchemy/testing/plugin/plugin_base.py,sha256=9FOAdlEnudyx5VuLMA7Q15d92SWjY6LAIR1_-Tw9On0,21095
sqlalchemy/testing/plugin/pytestplugin.py,sha256=JIiimunDVCq7stuVXBvd9FnUPc0odqAIi9kxa7AiW9M,16570
sqlalchemy/testing/profiling.py,sha256=dHC5hL9sBUMbXTMETOH9p9s0Y7_LvkF9xTuY8cCqzt8,9018
sqlalchemy/testing/provision.py,sha256=qfQ8i06SomPPu8Bffe68BIWDa6s44flfMpeK3fFp4DM,5698
sqlalchemy/testing/replay_fixture.py,sha256=pQde_cIatESSw7hDM5UewB5M-K0OdqOxu7aRy-j1Yhs,6084
sqlalchemy/testing/requirements.py,sha256=PIYtE2j1F6kEJYMZ0NGJe_358kcXjcfPTfy891G-RVE,33825
sqlalchemy/testing/schema.py,sha256=mEBW8JKWNJoTA0dqL4IkQaLzSixsktK5T3e7SqOhX-s,3828
sqlalchemy/testing/suite/__init__.py,sha256=y5Xv66ZdG_zCWEXRIWDWRoFtFIBivVa27Sqb6Ve_3wM,368
sqlalchemy/testing/suite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-36.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=f2NFiZf5UNrlERJ1GT6ZNBzKxYa8vUgiAqgNJXQA4LY,7012
sqlalchemy/testing/suite/test_ddl.py,sha256=uzAg5Ur03bcc4XNZLWvxDH0GjBiRehm2zjaieeJ7A04,2988
sqlalchemy/testing/suite/test_dialect.py,sha256=HDSS7zVRNaXmVpMjFXzkBNQ5t2GxNHZMH-dA4wPx-hk,7093
sqlalchemy/testing/suite/test_insert.py,sha256=PDju2SJ5zxmBL36FfJHjKx1-ncvAQnB809c5Jkj6gLI,9991
sqlalchemy/testing/suite/test_reflection.py,sha256=FcuT6sqAlhVnlWJLT-D92wA1NXBi8RXizKEVtYushYY,49802
sqlalchemy/testing/suite/test_results.py,sha256=ySfNVlqK6ne4CaGL06THvNY4fD0yDYeaKqUizpxhCto,11321
sqlalchemy/testing/suite/test_select.py,sha256=h_cNuzcolB5qEEUvd68TH_Sil5VZhC54dk63uFSuaq4,25198
sqlalchemy/testing/suite/test_sequence.py,sha256=OdhYzm7YLAAuAwI48NZ8GwpkDPYnKmjLik5Bgv6FLaI,4817
sqlalchemy/testing/suite/test_types.py,sha256=Ez-HJYvWmRbtmn6FcPeAhp7AYfZdmA96j4zkGjWzx-U,38359
sqlalchemy/testing/suite/test_update_delete.py,sha256=hI1arMUwZuAjX2QoyzVCFIddKZvCyYUjOHrJo85wlio,1547
sqlalchemy/testing/util.py,sha256=yy46wRMK3dNcvh5byIIfQSReIYzRti_jIANGaStkByU,10543
sqlalchemy/testing/warnings.py,sha256=MD_Vq8Zzd0W2vJm1N4m4ZCf5Stbd0HYJVnK20tkbV6Q,1732
sqlalchemy/types.py,sha256=F71xo2piMj4B2uHq5t8Xy8fVzYn0iMqWZbTJ6NQzi3A,3494
sqlalchemy/util/__init__.py,sha256=gfTZDojrUyCMLxl_C6FmN4XsIKNKV6AlSBKJUrMCuMs,6866
sqlalchemy/util/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_preloaded.cpython-36.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-36.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-36.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-36.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-36.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-36.pyc,,
sqlalchemy/util/_collections.py,sha256=sR6_6vgJRhTAFVEg-Yv8BZuSigJLR4VOzAvkX7snwHw,30320
sqlalchemy/util/_preloaded.py,sha256=O85O9xixY7gwlkJBILOZxC8vEnhQymOpt3uX7oQ_HOg,5973
sqlalchemy/util/compat.py,sha256=SC5nIQlWHLW6AK4BCYKwSZnxx3ewj4tWJE0-g2DvtDw,17494
sqlalchemy/util/deprecations.py,sha256=RyjUfbW-QcQUsjwR7gF1gtJTnNjTM1vx0wtcMbzRaWk,7733
sqlalchemy/util/langhelpers.py,sha256=W7CX9U_-oCslZn02q1CUO4qAuvcHaZpKC5UtiKRn1ZU,49352
sqlalchemy/util/queue.py,sha256=u58ho8_fklfw-JIuFW39DNiD9dNE91i-Iqj-ZAWlGAk,7036
sqlalchemy/util/topological.py,sha256=XgC6Z9DhSlWwBsvkiRZ3YlvpzVNct8p80xkjc124p9w,2864
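Each line above is a wheel ``RECORD`` entry: a path relative to site-packages, a ``sha256=`` content hash, and the file size in bytes (both fields are left empty for generated ``.pyc`` files). A minimal sketch of how such a hash field can be reproduced, assuming the file is read from the installation root:

.. code-block:: python

    import base64
    import hashlib
    from pathlib import Path

    def record_hash(path):
        # Wheel RECORD hashes are the urlsafe base64 of the SHA-256
        # digest with trailing '=' padding stripped (PEP 376 / PEP 427).
        digest = hashlib.sha256(Path(path).read_bytes()).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    # record_hash("sqlalchemy/processors.py") run from site-packages should
    # reproduce the sha256=... field recorded for that file above.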

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2013-2019 Nikolay Kim and Andrew Svetlov
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,661 +0,0 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.6.3
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5.3
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<4.0,>=2.0)
Requires-Dist: multidict (<5.0,>=4.5)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<1.6.0,>=1.0)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'
==================================
Async http client/server framework
==================================
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
:height: 64px
:width: 64px
:alt: aiohttp logo
|
.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
:target: https://travis-ci.com/aio-libs/aiohttp
:align: right
:alt: Travis status for master branch
.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
:target: https://ci.appveyor.com/project/aio-libs/aiohttp
:align: right
:alt: AppVeyor status for master branch
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
:target: https://codecov.io/gh/aio-libs/aiohttp
:alt: codecov.io status for master branch
.. image:: https://badge.fury.io/py/aiohttp.svg
:target: https://pypi.org/project/aiohttp
:alt: Latest PyPI package version
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
:target: https://docs.aiohttp.org/
:alt: Latest Read The Docs
.. image:: https://badges.gitter.im/Join%20Chat.svg
:target: https://gitter.im/aio-libs/Lobby
:alt: Chat on Gitter
Key Features
============
- Supports both client and server side of the HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
- Provides a Web-server with middlewares and pluggable routing.
Getting started
===============
Client
------
To get something from the web:
.. code-block:: python

    import aiohttp
    import asyncio

    async def fetch(session, url):
        async with session.get(url) as response:
            return await response.text()

    async def main():
        async with aiohttp.ClientSession() as session:
            html = await fetch(session, 'http://python.org')
            print(html)

    if __name__ == '__main__':
        loop = asyncio.get_event_loop()
        loop.run_until_complete(main())
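On Python 3.7 and newer, the explicit event-loop boilerplate above can be replaced by a single call; a minimal equivalent:

.. code-block:: python

    asyncio.run(main())  # creates, runs, and closes the event loop in one call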
Server
------
An example using a simple server:
.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws

    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    if __name__ == '__main__':
        web.run_app(app)
Documentation
=============
https://aiohttp.readthedocs.io/
Demos
=====
https://github.com/aio-libs/aiohttp-demos
External links
==============
* `Third party libraries
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
Feel free to post your questions and ideas here.
*gitter chat* https://gitter.im/aio-libs/Lobby
We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add the *aiohttp* tag to your question there.
Requirements
============
- Python >= 3.5.3
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_
You may optionally install the cChardet_ and aiodns_ libraries (highly
recommended for the sake of speed); both are pulled in by the ``speedups``
extra declared above, via ``pip install aiohttp[speedups]``.
.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
``aiohttp`` is offered under the Apache 2 license.
Keepsafe
========
The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.
Source code
===========
The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp
Benchmarks
==========
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
=========
Changelog
=========
..
You should *NOT* be adding new change log entries to this file, this
file is managed by towncrier. You *may* edit previous change logs to
fix problems like typo corrections or such.
To add a new change log entry, please see
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
we named the news folder "changes".
WARNING: Don't drop the next directive!
.. towncrier release notes start
3.6.3 (2020-10-12)
==================
Bugfixes
--------
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
release.
3.6.2 (2019-10-09)
==================
Features
--------
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
Bugfixes
--------
- Reset the ``sock_read`` timeout each time data is received for an
``aiohttp.ClientResponse``. `#3808
<https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fix handling of expired cookies so they are not stored in CookieJar.
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
- Fix misleading message in the string representation of ``ClientConnectorError``;
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
<https://github.com/aio-libs/aiohttp/issues/4097>`_
- Don't clobber HTTP status when using FileResponse.
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
Improved Documentation
----------------------
- Added minimal required logging configuration to logging documentation.
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
- Update docs to reflect proxy support.
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
- Fix typo in code example in testing docs.
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.6.1 (2019-09-19)
==================
Features
--------
- Compatibility with Python 3.8.
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
Bugfixes
--------
- Correct some exception string formats
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
unavailable because the runtime is built against
an outdated OpenSSL.
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
- Update multidict requirement to >= 4.5
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
Improved Documentation
----------------------
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
----
3.6.0 (2019-09-06)
==================
Features
--------
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
the Proactor event loop to work. `#3629
<https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
compatible with more http proxy servers. `#3798
<https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use a 308 redirect instead of 301. Unlike
301, a 308 preserves the request method, so clients can still use PUT/POST on
endpoints that are redirected because of a trailing slash.
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
task with unhandled exception when the server is used in conjunction with
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]]``.
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
by mypy in files that references multipart.py.
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close the session created inside ``aiohttp.request`` when an unhandled exception occurs
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Clean up per-chunk data in generic data read, fixing a memory leak.
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
transport" exception when clients disconnect uncleanly.
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
``aiohttp.ClientSession.request`` according to the docs `#3852
<https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
Improved Documentation
----------------------
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- Use ``if __name__ == '__main__':`` in server examples.
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
----
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
partial content only in case of compressed content
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
``access_log=True`` and the event loop being in debug mode. `#3504
<https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
with files asynchronously. I/O-based payloads from ``payload.py`` use a
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
<https://github.com/aio-libs/aiohttp/issues/3313>`_
- Render Internal Server Errors in plain text if the browser does not support HTML.
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts headers on write. Refactor how
``Payload.headers`` are handled. Payload instances now always have headers and
Content-Type defined. Fix Payload Content-Disposition header reset after initial
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
This allows Python 3.7 context vars to be used smoothly.
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
avoid a deprecation warning. `#3480
<https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancelling pending activities on ``web.run_app`` finalization.
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
to run in a background executor to avoid blocking the main thread and potentially
triggering health check failures. (`#3205
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add a default logging handler to ``web.run_app``. If the ``Application.debug`` flag is set
and the default logger ``aiohttp.access`` is used, access logs will now be output
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add a method argument to ``session.ws_connect()``. Sometimes a server API requires a
different HTTP method for WebSocket connection establishment. For example, ``Docker
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
Bugfixes
--------
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
collections module will not be supported anymore. (`#3273
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by ``normalize_path_middleware``. (`#3278
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await
connector.close()`` instead. (`#3417
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
Misc
----
- #3341, #3351

View File

@ -1,124 +0,0 @@
aiohttp-3.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp-3.6.3.dist-info/LICENSE.txt,sha256=atcq6P9K6Td0Wq4oBfNDqYf6o6YGrHLGCfLUj3GZspQ,11533
aiohttp-3.6.3.dist-info/METADATA,sha256=UmObIpkmeVqZFB1rwDUEbIzmKMrVaEaEfDZulA48d_g,24570
aiohttp-3.6.3.dist-info/RECORD,,
aiohttp-3.6.3.dist-info/WHEEL,sha256=SktxJNdVgf2GQ0Rnj47KhGrFXFhp4j6ROtWy0UXB1eM,106
aiohttp-3.6.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp/__init__.py,sha256=LagwdvgQ1g_JFcWIC-PjM8StM1gLulnzbjJ1UncMXoI,8427
aiohttp/__pycache__/__init__.cpython-36.pyc,,
aiohttp/__pycache__/abc.cpython-36.pyc,,
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
aiohttp/__pycache__/client.cpython-36.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
aiohttp/__pycache__/connector.cpython-36.pyc,,
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
aiohttp/__pycache__/formdata.cpython-36.pyc,,
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
aiohttp/__pycache__/helpers.cpython-36.pyc,,
aiohttp/__pycache__/http.cpython-36.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
aiohttp/__pycache__/locks.cpython-36.pyc,,
aiohttp/__pycache__/log.cpython-36.pyc,,
aiohttp/__pycache__/multipart.cpython-36.pyc,,
aiohttp/__pycache__/payload.cpython-36.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
aiohttp/__pycache__/resolver.cpython-36.pyc,,
aiohttp/__pycache__/signals.cpython-36.pyc,,
aiohttp/__pycache__/streams.cpython-36.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
aiohttp/__pycache__/tracing.cpython-36.pyc,,
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
aiohttp/__pycache__/web.cpython-36.pyc,,
aiohttp/__pycache__/web_app.cpython-36.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
aiohttp/__pycache__/web_log.cpython-36.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
aiohttp/__pycache__/web_request.cpython-36.pyc,,
aiohttp/__pycache__/web_response.cpython-36.pyc,,
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
aiohttp/__pycache__/web_server.cpython-36.pyc,,
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
aiohttp/__pycache__/worker.cpython-36.pyc,,
aiohttp/_cparser.pxd,sha256=xvsLl13ZXXyHGyb2Us7WsLncndQrxhyGB4KXnvbsRtQ,4099
aiohttp/_find_header.c,sha256=MOZn07_ot-UcOdQBpYAWQmyigqLvMwkqa_7l4M7D1dI,199932
aiohttp/_find_header.h,sha256=HistyxY7K3xEJ53Y5xEfwrDVDkfcV0zQ9mkzMgzi_jo,184
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
aiohttp/_frozenlist.c,sha256=-vfgzV6cNjUykuqt1kkWDiT2U92BR2zhL9b9yDiiodg,288943
aiohttp/_frozenlist.cp36-win_amd64.pyd,sha256=ChZp9r6iFxKYd5M_zgBVSnfuG69razOy5oZNxQ_mfyo,69120
aiohttp/_frozenlist.pyx,sha256=SB851KmtWpiJ2ZB05Tpo4855VkCyRtgMs843Wz8kFeg,2713
aiohttp/_headers.pxi,sha256=PxiakDsuEs0O94eHRlPcerO24TqPAxc0BtX86XZL4gw,2111
aiohttp/_helpers.c,sha256=sQcHpEGAX3jEvA8jujh4_D_fev9cRjMAc5CySqtHYrg,208657
aiohttp/_helpers.cp36-win_amd64.pyd,sha256=vaRlMUUMLYsjE0c8OyK-IYIhYr3U-V6_2O3PJCrpcfc,50176
aiohttp/_helpers.pyi,sha256=C6Q4W8EwElvD1gF1siRGMVG7evEX8fWWstZHL1BbsDA,212
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
aiohttp/_http_parser.c,sha256=W1sETtDrrBdnBiSOpqaDcO9DcE9zhyLjPTq4WKIK0bc,997494
aiohttp/_http_parser.cp36-win_amd64.pyd,sha256=GQaatWLpRa9ldsKnE7Dg20DkDJZBjsg_v0fbSR2YUo0,248320
aiohttp/_http_parser.pyx,sha256=C2XxooYRput7XPQzbaGMDrtvJtmhWa58SDPytyuAwGk,29577
aiohttp/_http_writer.c,sha256=-wuBZwiaUXEy1Zj-R5BD5igH7cUg_CYb5ZvYMsh8vzo,211620
aiohttp/_http_writer.cp36-win_amd64.pyd,sha256=yxFOMEepS8vykxlCNgXRJL-PeaDQhuhdw7pU3yryUGI,44032
aiohttp/_http_writer.pyx,sha256=TzCawCBLMe7w9eX2SEcUcLYySwkFfrfjaEYHS0Uvjtg,4353
aiohttp/_websocket.c,sha256=JrG6bXW3OR8sfxl5V1Q3VTXvGBbFTYgzgdbhQHr3LGI,136606
aiohttp/_websocket.cp36-win_amd64.pyd,sha256=BoTx06YtxrQWEGuQrGXey7sg5ZU4Fy40TMUv7UIu8H0,29184
aiohttp/_websocket.pyx,sha256=Ig8jXl_wkAXPugEWS0oPYo0-BnL8zT7uBG6BrYqVXdA,1613
aiohttp/abc.py,sha256=s3wtDI3os8uX4FdQbsvJwr67cFGhylif0mR5k2SKY04,5600
aiohttp/base_protocol.py,sha256=5PJImwc0iX8kR3VjZn1D_SAeL-6JKERi87iGHEYjJQ4,2744
aiohttp/client.py,sha256=DYv-h8V2wljt4hRmPDmU2czk9zSlSn8zua9MgssSEiY,45130
aiohttp/client_exceptions.py,sha256=RCbzCGw_HcaqnL4AHf3nol32xH_2xu1hrYbLNgpjHqk,8786
aiohttp/client_proto.py,sha256=XDXJ0G9RW8m80wHahzjgp4T5S3Rf6LSYks9Q9MajSQg,8276
aiohttp/client_reqrep.py,sha256=zf6GFaDYvpy50HZ4GntrT8flcc6B4HfwnlHw_yYdGMw,37064
aiohttp/client_ws.py,sha256=OUkkw9RwRHRmAakBibE6c63VLMWGVgoyRadoC22wtNY,10995
aiohttp/connector.py,sha256=pbq2XHrujiyQXbIhzXQK6E1zrzRYedzt8xlGNmvbQcM,43672
aiohttp/cookiejar.py,sha256=lNwvnGX3BjIDU4btE50AUsBQditLXzJhsPPUMZo-dkI,12249
aiohttp/formdata.py,sha256=1yNFnS6O0wUrIL4_V66-DwyjS3nWVd0JiPIjWKbTZTs,5957
aiohttp/frozenlist.py,sha256=PSElO5biFCVHyVEr6-hXy7--cDaHAxaWDrsFxESHsFc,1853
aiohttp/frozenlist.pyi,sha256=z-EGiL4Q5MTe1wxDZINsIhqh4Eb0oT9Xn0X_Rt7C9ns,1512
aiohttp/hdrs.py,sha256=PmN2SUiMmwiC0TMEEMSFfwirUpnrzy3jwUhniPGFlmc,3549
aiohttp/helpers.py,sha256=yAdG1c-axo7-Vsf3CRaEqb7hU5Ej-FpUgZowGA76f_U,23613
aiohttp/http.py,sha256=H9xNqvagxteFvx2R7AeYiGfze7uR6VKF5IsUAITr7d4,2183
aiohttp/http_exceptions.py,sha256=Oby70EpyDmwpsb4DpCFYXw-sa856HmWv8IjeHlWWlJo,2771
aiohttp/http_parser.py,sha256=Ttk5BSX11cXMaFJmquzd1oNkZbnodghQvBgdUGdQxnE,28676
aiohttp/http_websocket.py,sha256=KmHznrwSjtpUgxbFafBg1MaAaCpxGxoK0IL8wDKg9f8,25400
aiohttp/http_writer.py,sha256=VBMPy_AaB7m_keycuu05SCN2S3GVVyY8UCHG-W86Y1w,5411
aiohttp/locks.py,sha256=6DiJHW1eQKXypu1eWXZT3_amPhFBK-jnxdI-_BpYICk,1278
aiohttp/log.py,sha256=qAQMjI6XpX3MOAZATN4HcG0tIceSreR54orlYZaoJ0A,333
aiohttp/multipart.py,sha256=RPXfp5GMauxW19nbBaLAkzgUFKTQ9eMo4XtZ7ItGyo4,33740
aiohttp/payload.py,sha256=lCF_pZvwyBKJGk4OOLYEQhtxUwOW8rsFF0pxisvfBps,14483
aiohttp/payload_streamer.py,sha256=7koj4FVujDGriDIOes48XPp5BK9tsWYyTxJG-3aNaHc,2177
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
aiohttp/pytest_plugin.py,sha256=1_XNSrZS-czuaNVt4qvRQs-GbIIl8DaLykGpoDlZfhU,11187
aiohttp/resolver.py,sha256=mQvusmMHpS0JekvnX7R1y4aqQ7BIIv3FIkxO5wgv2xQ,3738
aiohttp/signals.py,sha256=I_QAX1S7VbN7KDnNO6CSnAzhzx42AYh2Dto_FC9DQ3k,982
aiohttp/signals.pyi,sha256=pg4KElFcxBNFU-OQpTe2x-7qKJ79bAlemgqE-yaciiU,341
aiohttp/streams.py,sha256=EPM7T5_aJLOXlBTIEeFapIQ1O33KsHTvT-wWH3X0QvQ,21093
aiohttp/tcp_helpers.py,sha256=q9fHztjKbR57sCc4zWoo89QDW88pLT0OpcdHLGcV3Fo,1694
aiohttp/test_utils.py,sha256=_GjrPdE_9v0SxzbM4Tmt8vst-KJPwL2ILM_Rl1jHhi4,21530
aiohttp/tracing.py,sha256=GGhlQDrx5AVwFt33Zl4DvBIoFcR7sXAsgXNxvkd2Uus,13740
aiohttp/typedefs.py,sha256=o4R9uAySHxTzedIfX3UPbD0a5TnD5inc_M-h_4qyC4U,1377
aiohttp/web.py,sha256=KQXp0C__KpeX8nYM3FWl-eoMAmj9LZIbx7YeI39pQco,19940
aiohttp/web_app.py,sha256=dHOhoDoakwdrya0cc6Jl6K723MKGmd_M5LxH3wDeGQI,17779
aiohttp/web_exceptions.py,sha256=CQvslnHcpFnreO-qNjnKOWQev7ZvlTG6jfV14NQwb1Q,10519
aiohttp/web_fileresponse.py,sha256=TftBNfbgowCQ0L5Iud-dewCAnXq5tIyP-8iZ-KrSHw8,13118
aiohttp/web_log.py,sha256=gOR8iLbhjeAUwGL-21qD31kA0HlYSNhpdX6eNwJ-3Uo,8490
aiohttp/web_middlewares.py,sha256=jATe_igeeoyBoWKBDW_ISOOzFKvxSoLJE1QPTqZPWGc,4310
aiohttp/web_protocol.py,sha256=Zol5oVApIE12NDLBV_W1oKW8AN-sGdBfC0RFMI050U0,22791
aiohttp/web_request.py,sha256=xzvj84uGe5Uuug1b4iKWZl8uko_0TpzYKa00POke_NM,26526
aiohttp/web_response.py,sha256=CEx04E7NLNg6mfgTjT0QPS9vJuglbw3UQvwob6Qeb7c,26202
aiohttp/web_routedef.py,sha256=5QCl85zQml2qoj7bkC9XMoK4stBVuUoiq_0uefxifjc,6293
aiohttp/web_runner.py,sha256=ArW4NjMJ24Fv68Ez-9hPL1WNzVygDYEWJ4aIfzOMKz8,11479
aiohttp/web_server.py,sha256=P826xDCDs4VgeksMam8OHKm_VzprXuOpsJrysqj3CVg,2222
aiohttp/web_urldispatcher.py,sha256=8uhNNXlHd2WJfJ4wcyQ1UxoRM1VUyWWwQhK-TPrM_GM,40043
aiohttp/web_ws.py,sha256=mAU6Ln3AbMZeXjUZSSA5MmE39hTajJIMxBE0xnq-4Tc,17414
aiohttp/worker.py,sha256=yatPZxpUOp9CzDA05Jb2UWi0eo2PgGWlQm4lIFCRCSY,8420

View File

@ -1,9 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: false
Tag: cp36-cp36m-win_amd64
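The ``Tag`` line is a PEP 425 compatibility tag: CPython 3.6, the ``cp36m`` ABI, 64-bit Windows. A short sketch decoding it with the third-party ``packaging`` library (an assumption; ``packaging`` is not among the vendored files listed above):

.. code-block:: python

    from packaging.tags import parse_tag

    for tag in parse_tag("cp36-cp36m-win_amd64"):
        print(tag.interpreter, tag.abi, tag.platform)  # cp36 cp36m win_amd64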

View File

@ -1,226 +1,217 @@
__version__ = '3.6.3'
__version__ = "3.7.4.post0"
from typing import Tuple # noqa
from typing import Tuple
from . import hdrs as hdrs
from .client import BaseConnector as BaseConnector
from .client import ClientConnectionError as ClientConnectionError
from .client import (
BaseConnector as BaseConnector,
ClientConnectionError as ClientConnectionError,
ClientConnectorCertificateError as ClientConnectorCertificateError,
ClientConnectorError as ClientConnectorError,
ClientConnectorSSLError as ClientConnectorSSLError,
ClientError as ClientError,
ClientHttpProxyError as ClientHttpProxyError,
ClientOSError as ClientOSError,
ClientPayloadError as ClientPayloadError,
ClientProxyConnectionError as ClientProxyConnectionError,
ClientRequest as ClientRequest,
ClientResponse as ClientResponse,
ClientResponseError as ClientResponseError,
ClientSession as ClientSession,
ClientSSLError as ClientSSLError,
ClientTimeout as ClientTimeout,
ClientWebSocketResponse as ClientWebSocketResponse,
ContentTypeError as ContentTypeError,
Fingerprint as Fingerprint,
InvalidURL as InvalidURL,
NamedPipeConnector as NamedPipeConnector,
RequestInfo as RequestInfo,
ServerConnectionError as ServerConnectionError,
ServerDisconnectedError as ServerDisconnectedError,
ServerFingerprintMismatch as ServerFingerprintMismatch,
ServerTimeoutError as ServerTimeoutError,
TCPConnector as TCPConnector,
TooManyRedirects as TooManyRedirects,
UnixConnector as UnixConnector,
WSServerHandshakeError as WSServerHandshakeError,
request as request,
)
from .client import ClientConnectorError as ClientConnectorError
from .client import ClientConnectorSSLError as ClientConnectorSSLError
from .client import ClientError as ClientError
from .client import ClientHttpProxyError as ClientHttpProxyError
from .client import ClientOSError as ClientOSError
from .client import ClientPayloadError as ClientPayloadError
from .client import ClientProxyConnectionError as ClientProxyConnectionError
from .client import ClientRequest as ClientRequest
from .client import ClientResponse as ClientResponse
from .client import ClientResponseError as ClientResponseError
from .client import ClientSession as ClientSession
from .client import ClientSSLError as ClientSSLError
from .client import ClientTimeout as ClientTimeout
from .client import ClientWebSocketResponse as ClientWebSocketResponse
from .client import ContentTypeError as ContentTypeError
from .client import Fingerprint as Fingerprint
from .client import InvalidURL as InvalidURL
from .client import NamedPipeConnector as NamedPipeConnector
from .client import RequestInfo as RequestInfo
from .client import ServerConnectionError as ServerConnectionError
from .client import ServerDisconnectedError as ServerDisconnectedError
from .client import ServerFingerprintMismatch as ServerFingerprintMismatch
from .client import ServerTimeoutError as ServerTimeoutError
from .client import TCPConnector as TCPConnector
from .client import TooManyRedirects as TooManyRedirects
from .client import UnixConnector as UnixConnector
from .client import WSServerHandshakeError as WSServerHandshakeError
from .client import request as request
from .cookiejar import CookieJar as CookieJar
from .cookiejar import DummyCookieJar as DummyCookieJar
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth as BasicAuth
from .helpers import ChainMapProxy as ChainMapProxy
from .http import HttpVersion as HttpVersion
from .http import HttpVersion10 as HttpVersion10
from .http import HttpVersion11 as HttpVersion11
from .http import WebSocketError as WebSocketError
from .http import WSCloseCode as WSCloseCode
from .http import WSMessage as WSMessage
from .http import WSMsgType as WSMsgType
from .helpers import BasicAuth as BasicAuth, ChainMapProxy as ChainMapProxy
from .http import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
WebSocketError as WebSocketError,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
)
from .multipart import (
BadContentDispositionHeader as BadContentDispositionHeader,
)
from .multipart import BadContentDispositionParam as BadContentDispositionParam
from .multipart import BodyPartReader as BodyPartReader
from .multipart import MultipartReader as MultipartReader
from .multipart import MultipartWriter as MultipartWriter
from .multipart import (
BadContentDispositionParam as BadContentDispositionParam,
BodyPartReader as BodyPartReader,
MultipartReader as MultipartReader,
MultipartWriter as MultipartWriter,
content_disposition_filename as content_disposition_filename,
parse_content_disposition as parse_content_disposition,
)
from .payload import (
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
AsyncIterablePayload as AsyncIterablePayload,
BufferedReaderPayload as BufferedReaderPayload,
BytesIOPayload as BytesIOPayload,
BytesPayload as BytesPayload,
IOBasePayload as IOBasePayload,
JsonPayload as JsonPayload,
Payload as Payload,
StringIOPayload as StringIOPayload,
StringPayload as StringPayload,
TextIOPayload as TextIOPayload,
get_payload as get_payload,
payload_type as payload_type,
)
from .multipart import parse_content_disposition as parse_content_disposition
from .payload import PAYLOAD_REGISTRY as PAYLOAD_REGISTRY
from .payload import AsyncIterablePayload as AsyncIterablePayload
from .payload import BufferedReaderPayload as BufferedReaderPayload
from .payload import BytesIOPayload as BytesIOPayload
from .payload import BytesPayload as BytesPayload
from .payload import IOBasePayload as IOBasePayload
from .payload import JsonPayload as JsonPayload
from .payload import Payload as Payload
from .payload import StringIOPayload as StringIOPayload
from .payload import StringPayload as StringPayload
from .payload import TextIOPayload as TextIOPayload
from .payload import get_payload as get_payload
from .payload import payload_type as payload_type
from .payload_streamer import streamer as streamer
from .resolver import AsyncResolver as AsyncResolver
from .resolver import DefaultResolver as DefaultResolver
from .resolver import ThreadedResolver as ThreadedResolver
from .resolver import (
AsyncResolver as AsyncResolver,
DefaultResolver as DefaultResolver,
ThreadedResolver as ThreadedResolver,
)
from .signals import Signal as Signal
from .streams import EMPTY_PAYLOAD as EMPTY_PAYLOAD
from .streams import DataQueue as DataQueue
from .streams import EofStream as EofStream
from .streams import FlowControlDataQueue as FlowControlDataQueue
from .streams import StreamReader as StreamReader
from .tracing import TraceConfig as TraceConfig
from .streams import (
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
DataQueue as DataQueue,
EofStream as EofStream,
FlowControlDataQueue as FlowControlDataQueue,
StreamReader as StreamReader,
)
from .tracing import (
TraceConfig as TraceConfig,
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
)
from .tracing import (
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
)
from .tracing import (
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
)
from .tracing import (
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
)
from .tracing import (
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
)
from .tracing import TraceDnsCacheHitParams as TraceDnsCacheHitParams
from .tracing import TraceDnsCacheMissParams as TraceDnsCacheMissParams
from .tracing import (
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
)
from .tracing import (
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
)
from .tracing import TraceRequestChunkSentParams as TraceRequestChunkSentParams
from .tracing import TraceRequestEndParams as TraceRequestEndParams
from .tracing import TraceRequestExceptionParams as TraceRequestExceptionParams
from .tracing import TraceRequestRedirectParams as TraceRequestRedirectParams
from .tracing import TraceRequestStartParams as TraceRequestStartParams
from .tracing import (
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
TraceRequestEndParams as TraceRequestEndParams,
TraceRequestExceptionParams as TraceRequestExceptionParams,
TraceRequestRedirectParams as TraceRequestRedirectParams,
TraceRequestStartParams as TraceRequestStartParams,
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
__all__ = (
'hdrs',
__all__: Tuple[str, ...] = (
"hdrs",
# client
'BaseConnector',
'ClientConnectionError',
'ClientConnectorCertificateError',
'ClientConnectorError',
'ClientConnectorSSLError',
'ClientError',
'ClientHttpProxyError',
'ClientOSError',
'ClientPayloadError',
'ClientProxyConnectionError',
'ClientResponse',
'ClientRequest',
'ClientResponseError',
'ClientSSLError',
'ClientSession',
'ClientTimeout',
'ClientWebSocketResponse',
'ContentTypeError',
'Fingerprint',
'InvalidURL',
'RequestInfo',
'ServerConnectionError',
'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ServerTimeoutError',
'TCPConnector',
'TooManyRedirects',
'UnixConnector',
'NamedPipeConnector',
'WSServerHandshakeError',
'request',
"BaseConnector",
"ClientConnectionError",
"ClientConnectorCertificateError",
"ClientConnectorError",
"ClientConnectorSSLError",
"ClientError",
"ClientHttpProxyError",
"ClientOSError",
"ClientPayloadError",
"ClientProxyConnectionError",
"ClientResponse",
"ClientRequest",
"ClientResponseError",
"ClientSSLError",
"ClientSession",
"ClientTimeout",
"ClientWebSocketResponse",
"ContentTypeError",
"Fingerprint",
"InvalidURL",
"RequestInfo",
"ServerConnectionError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ServerTimeoutError",
"TCPConnector",
"TooManyRedirects",
"UnixConnector",
"NamedPipeConnector",
"WSServerHandshakeError",
"request",
# cookiejar
'CookieJar',
'DummyCookieJar',
"CookieJar",
"DummyCookieJar",
# formdata
'FormData',
"FormData",
# helpers
'BasicAuth',
'ChainMapProxy',
"BasicAuth",
"ChainMapProxy",
# http
'HttpVersion',
'HttpVersion10',
'HttpVersion11',
'WSMsgType',
'WSCloseCode',
'WSMessage',
'WebSocketError',
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
"WSMsgType",
"WSCloseCode",
"WSMessage",
"WebSocketError",
# multipart
'BadContentDispositionHeader',
'BadContentDispositionParam',
'BodyPartReader',
'MultipartReader',
'MultipartWriter',
'content_disposition_filename',
'parse_content_disposition',
"BadContentDispositionHeader",
"BadContentDispositionParam",
"BodyPartReader",
"MultipartReader",
"MultipartWriter",
"content_disposition_filename",
"parse_content_disposition",
# payload
'AsyncIterablePayload',
'BufferedReaderPayload',
'BytesIOPayload',
'BytesPayload',
'IOBasePayload',
'JsonPayload',
'PAYLOAD_REGISTRY',
'Payload',
'StringIOPayload',
'StringPayload',
'TextIOPayload',
'get_payload',
'payload_type',
"AsyncIterablePayload",
"BufferedReaderPayload",
"BytesIOPayload",
"BytesPayload",
"IOBasePayload",
"JsonPayload",
"PAYLOAD_REGISTRY",
"Payload",
"StringIOPayload",
"StringPayload",
"TextIOPayload",
"get_payload",
"payload_type",
# payload_streamer
'streamer',
"streamer",
# resolver
'AsyncResolver',
'DefaultResolver',
'ThreadedResolver',
"AsyncResolver",
"DefaultResolver",
"ThreadedResolver",
# signals
'Signal',
'DataQueue',
'EMPTY_PAYLOAD',
'EofStream',
'FlowControlDataQueue',
'StreamReader',
"Signal",
"DataQueue",
"EMPTY_PAYLOAD",
"EofStream",
"FlowControlDataQueue",
"StreamReader",
# tracing
'TraceConfig',
'TraceConnectionCreateEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionReuseconnParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceDnsResolveHostEndParams',
'TraceDnsResolveHostStartParams',
'TraceRequestChunkSentParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceRequestRedirectParams',
'TraceRequestStartParams',
'TraceResponseChunkReceivedParams',
) # type: Tuple[str, ...]
"TraceConfig",
"TraceConnectionCreateEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionReuseconnParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceDnsResolveHostEndParams",
"TraceDnsResolveHostStartParams",
"TraceRequestChunkSentParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceRequestRedirectParams",
"TraceRequestStartParams",
"TraceResponseChunkReceivedParams",
)
try:
from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa
__all__ += ('GunicornWebWorker', 'GunicornUVLoopWebWorker')
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError: # pragma: no cover
pass
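
The try/except above is the standard optional-dependency export pattern: the Gunicorn workers are re-exported only when gunicorn itself is importable, so importing aiohttp never fails just because an extra is missing. A minimal sketch of the same pattern, using a hypothetical optional package fast_json (not a real aiohttp extra):

# Sketch of the optional-export pattern; "fast_json" and "fast_dumps"
# are hypothetical names used for illustration only.
import json

__all__ = ("dumps",)

def dumps(obj):
    """Pure-Python fallback serializer."""
    return json.dumps(obj)

try:
    from fast_json import dumps as fast_dumps  # optional dependency may be absent
    __all__ += ("fast_dumps",)
except ImportError:  # pragma: no cover
    pass  # callers simply keep using the fallback dumps()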

@ -24,7 +24,7 @@ find_header(const char *str, int size)
int count = -1;
pchar--;
INITIAL:
NEXT_CHAR();
switch (ch) {
case 'A':
@ -8591,16 +8591,6 @@ TRANSFER_ENCODIN:
U:
NEXT_CHAR();
switch (ch) {
case 'P':
if (last) {
return -1;
}
goto UP;
case 'p':
if (last) {
return -1;
}
goto UP;
case 'R':
if (last) {
return -1;
@ -8611,6 +8601,16 @@ U:
return -1;
}
goto UR;
case 'P':
if (last) {
return -1;
}
goto UP;
case 'p':
if (last) {
return -1;
}
goto UP;
case 'S':
if (last) {
return -1;
@ -8625,6 +8625,23 @@ U:
return -1;
}
UR:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return 66;
}
goto URI;
case 'i':
if (last) {
return 66;
}
goto URI;
default:
return -1;
}
UP:
NEXT_CHAR();
switch (ch) {
@ -8698,35 +8715,18 @@ UPGRAD:
switch (ch) {
case 'E':
if (last) {
return 66;
return 67;
}
goto UPGRADE;
case 'e':
if (last) {
return 66;
return 67;
}
goto UPGRADE;
default:
return -1;
}
UR:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return 67;
}
goto URI;
case 'i':
if (last) {
return 67;
}
goto URI;
default:
return -1;
}
US:
NEXT_CHAR();
switch (ch) {
@ -8939,26 +8939,6 @@ VI:
W:
NEXT_CHAR();
switch (ch) {
case 'A':
if (last) {
return -1;
}
goto WA;
case 'a':
if (last) {
return -1;
}
goto WA;
case 'E':
if (last) {
return -1;
}
goto WE;
case 'e':
if (last) {
return -1;
}
goto WE;
case 'W':
if (last) {
return -1;
@ -8969,351 +8949,16 @@ W:
return -1;
}
goto WW;
default:
return -1;
}
WA:
NEXT_CHAR();
switch (ch) {
case 'N':
case 'A':
if (last) {
return -1;
}
goto WAN;
case 'n':
goto WA;
case 'a':
if (last) {
return -1;
}
goto WAN;
case 'R':
if (last) {
return -1;
}
goto WAR;
case 'r':
if (last) {
return -1;
}
goto WAR;
default:
return -1;
}
WAN:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return -1;
}
goto WANT;
case 't':
if (last) {
return -1;
}
goto WANT;
default:
return -1;
}
WANT:
NEXT_CHAR();
switch (ch) {
case '-':
if (last) {
return -1;
}
goto WANT_;
default:
return -1;
}
WANT_:
NEXT_CHAR();
switch (ch) {
case 'D':
if (last) {
return -1;
}
goto WANT_D;
case 'd':
if (last) {
return -1;
}
goto WANT_D;
default:
return -1;
}
WANT_D:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WANT_DI;
case 'i':
if (last) {
return -1;
}
goto WANT_DI;
default:
return -1;
}
WANT_DI:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return -1;
}
goto WANT_DIG;
case 'g':
if (last) {
return -1;
}
goto WANT_DIG;
default:
return -1;
}
WANT_DIG:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WANT_DIGE;
case 'e':
if (last) {
return -1;
}
goto WANT_DIGE;
default:
return -1;
}
WANT_DIGE:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WANT_DIGES;
case 's':
if (last) {
return -1;
}
goto WANT_DIGES;
default:
return -1;
}
WANT_DIGES:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 71;
}
goto WANT_DIGEST;
case 't':
if (last) {
return 71;
}
goto WANT_DIGEST;
default:
return -1;
}
WAR:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARN;
case 'n':
if (last) {
return -1;
}
goto WARN;
default:
return -1;
}
WARN:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WARNI;
case 'i':
if (last) {
return -1;
}
goto WARNI;
default:
return -1;
}
WARNI:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARNIN;
case 'n':
if (last) {
return -1;
}
goto WARNIN;
default:
return -1;
}
WARNIN:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return 72;
}
goto WARNING;
case 'g':
if (last) {
return 72;
}
goto WARNING;
default:
return -1;
}
WE:
NEXT_CHAR();
switch (ch) {
case 'B':
if (last) {
return -1;
}
goto WEB;
case 'b':
if (last) {
return -1;
}
goto WEB;
default:
return -1;
}
WEB:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WEBS;
case 's':
if (last) {
return -1;
}
goto WEBS;
default:
return -1;
}
WEBS:
NEXT_CHAR();
switch (ch) {
case 'O':
if (last) {
return -1;
}
goto WEBSO;
case 'o':
if (last) {
return -1;
}
goto WEBSO;
default:
return -1;
}
WEBSO:
NEXT_CHAR();
switch (ch) {
case 'C':
if (last) {
return -1;
}
goto WEBSOC;
case 'c':
if (last) {
return -1;
}
goto WEBSOC;
default:
return -1;
}
WEBSOC:
NEXT_CHAR();
switch (ch) {
case 'K':
if (last) {
return -1;
}
goto WEBSOCK;
case 'k':
if (last) {
return -1;
}
goto WEBSOCK;
default:
return -1;
}
WEBSOCK:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WEBSOCKE;
case 'e':
if (last) {
return -1;
}
goto WEBSOCKE;
default:
return -1;
}
WEBSOCKE:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 73;
}
goto WEBSOCKET;
case 't':
if (last) {
return 73;
}
goto WEBSOCKET;
goto WA;
default:
return -1;
}
@ -9539,18 +9184,244 @@ WWW_AUTHENTICAT:
switch (ch) {
case 'E':
if (last) {
return 74;
return 71;
}
goto WWW_AUTHENTICATE;
case 'e':
if (last) {
return 74;
return 71;
}
goto WWW_AUTHENTICATE;
default:
return -1;
}
WA:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WAN;
case 'n':
if (last) {
return -1;
}
goto WAN;
case 'R':
if (last) {
return -1;
}
goto WAR;
case 'r':
if (last) {
return -1;
}
goto WAR;
default:
return -1;
}
WAN:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return -1;
}
goto WANT;
case 't':
if (last) {
return -1;
}
goto WANT;
default:
return -1;
}
WANT:
NEXT_CHAR();
switch (ch) {
case '-':
if (last) {
return -1;
}
goto WANT_;
default:
return -1;
}
WANT_:
NEXT_CHAR();
switch (ch) {
case 'D':
if (last) {
return -1;
}
goto WANT_D;
case 'd':
if (last) {
return -1;
}
goto WANT_D;
default:
return -1;
}
WANT_D:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WANT_DI;
case 'i':
if (last) {
return -1;
}
goto WANT_DI;
default:
return -1;
}
WANT_DI:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return -1;
}
goto WANT_DIG;
case 'g':
if (last) {
return -1;
}
goto WANT_DIG;
default:
return -1;
}
WANT_DIG:
NEXT_CHAR();
switch (ch) {
case 'E':
if (last) {
return -1;
}
goto WANT_DIGE;
case 'e':
if (last) {
return -1;
}
goto WANT_DIGE;
default:
return -1;
}
WANT_DIGE:
NEXT_CHAR();
switch (ch) {
case 'S':
if (last) {
return -1;
}
goto WANT_DIGES;
case 's':
if (last) {
return -1;
}
goto WANT_DIGES;
default:
return -1;
}
WANT_DIGES:
NEXT_CHAR();
switch (ch) {
case 'T':
if (last) {
return 72;
}
goto WANT_DIGEST;
case 't':
if (last) {
return 72;
}
goto WANT_DIGEST;
default:
return -1;
}
WAR:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARN;
case 'n':
if (last) {
return -1;
}
goto WARN;
default:
return -1;
}
WARN:
NEXT_CHAR();
switch (ch) {
case 'I':
if (last) {
return -1;
}
goto WARNI;
case 'i':
if (last) {
return -1;
}
goto WARNI;
default:
return -1;
}
WARNI:
NEXT_CHAR();
switch (ch) {
case 'N':
if (last) {
return -1;
}
goto WARNIN;
case 'n':
if (last) {
return -1;
}
goto WARNIN;
default:
return -1;
}
WARNIN:
NEXT_CHAR();
switch (ch) {
case 'G':
if (last) {
return 73;
}
goto WARNING;
case 'g':
if (last) {
return 73;
}
goto WARNING;
default:
return -1;
}
X:
NEXT_CHAR();
switch (ch) {
@ -9787,12 +9658,12 @@ X_FORWARDED_FO:
switch (ch) {
case 'R':
if (last) {
return 75;
return 74;
}
goto X_FORWARDED_FOR;
case 'r':
if (last) {
return 75;
return 74;
}
goto X_FORWARDED_FOR;
default:
@ -9838,12 +9709,12 @@ X_FORWARDED_HOS:
switch (ch) {
case 'T':
if (last) {
return 76;
return 75;
}
goto X_FORWARDED_HOST;
case 't':
if (last) {
return 76;
return 75;
}
goto X_FORWARDED_HOST;
default:
@ -9906,12 +9777,12 @@ X_FORWARDED_PROT:
switch (ch) {
case 'O':
if (last) {
return 77;
return 76;
}
goto X_FORWARDED_PROTO;
case 'o':
if (last) {
return 77;
return 76;
}
goto X_FORWARDED_PROTO;
default:
@ -9989,7 +9860,6 @@ VARY:
VIA:
WANT_DIGEST:
WARNING:
WEBSOCKET:
WWW_AUTHENTICATE:
X_FORWARDED_FOR:
X_FORWARDED_HOST:
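/* The integer codes returned throughout this file are indices into the
 * known-headers tuple in aiohttp/_headers.pxi.  The regenerated table swaps
 * URI ahead of UPGRADE (now 66/67), moves WWW-AUTHENTICATE up to 71, shifts
 * WANT-DIGEST and WARNING to 72/73, drops WEBSOCKET entirely, and renumbers
 * X-Forwarded-For/Host/Proto to 74/75/76; that is why the WA*/WAR* states
 * above are deleted in one place and re-emitted after WWW_AUTHENTICATE: the
 * generator writes states in table order rather than preserving hand layout. */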

@ -1,4 +1,4 @@
/* Generated by Cython 0.29.13 */
/* Generated by Cython 0.29.21 */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
@ -7,8 +7,8 @@
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_13"
#define CYTHON_HEX_VERSION 0x001D0DF0
#define CYTHON_ABI "0_29_21"
#define CYTHON_HEX_VERSION 0x001D15F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@ -435,7 +435,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
@ -484,8 +488,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
@ -496,6 +502,13 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
@ -535,7 +548,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
@ -576,11 +589,10 @@ static CYTHON_INLINE float __PYX_NAN() {
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
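/* Relative to the 0.29.13 output, the error-position bookkeeping is factored
 * into __PYX_MARK_ERR_POS so a position can be recorded without jumping, and
 * the (void) casts keep the compiler from warning about unused variables now
 * that __pyx_lineno, __pyx_filename and __pyx_clineno are function-local (the
 * "int __pyx_lineno = 0; ..." triples added to each function in this diff)
 * rather than file-level globals. */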
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
@ -1086,7 +1098,7 @@ static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {
if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {
Py_INCREF(x);
PyList_SET_ITEM(list, len, x);
Py_SIZE(list) = len+1;
__Pyx_SET_SIZE(list, len + 1);
return 0;
}
return PyList_Append(list, x);
@ -1182,6 +1194,9 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam
/* SetVTable.proto */
static int __Pyx_SetVtable(PyObject *dict, void *vtable);
/* PyObjectGetAttrStrNoError.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
@ -1393,6 +1408,9 @@ static PyObject *__pyx_codeobj__3;
static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_items = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
@ -1451,6 +1469,9 @@ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_ob
int __pyx_t_1;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__init__", 0);
__Pyx_INCREF(__pyx_v_items);
@ -1561,6 +1582,9 @@ static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struc
__Pyx_RefNannyDeclarations
int __pyx_t_1;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("_check_frozen", 0);
/* "aiohttp/_frozenlist.pyx":18
@ -1629,6 +1653,9 @@ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
Py_ssize_t __pyx_t_2;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("_fast_len", 0);
/* "aiohttp/_frozenlist.pyx":22
@ -1747,6 +1774,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struc
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__getitem__", 0);
/* "aiohttp/_frozenlist.pyx":28
@ -1811,6 +1841,9 @@ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __py
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setitem__", 0);
/* "aiohttp/_frozenlist.pyx":31
@ -1882,6 +1915,9 @@ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __py
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__delitem__", 0);
/* "aiohttp/_frozenlist.pyx":35
@ -1954,6 +1990,9 @@ static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
Py_ssize_t __pyx_t_2;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__len__", 0);
/* "aiohttp/_frozenlist.pyx":39
@ -2015,6 +2054,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__iter__", 0);
/* "aiohttp/_frozenlist.pyx":42
@ -2094,6 +2136,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(str
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reversed__", 0);
/* "aiohttp/_frozenlist.pyx":45
@ -2158,6 +2203,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(str
static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/
static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) {
PyObject *__pyx_v_op = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0);
@ -2183,6 +2231,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(stru
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__richcmp__", 0);
/* "aiohttp/_frozenlist.pyx":48
@ -2448,6 +2499,9 @@ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject
static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_pos = 0;
PyObject *__pyx_v_item = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("insert (wrapper)", 0);
@ -2510,6 +2564,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __
PyObject *__pyx_t_1 = NULL;
Py_ssize_t __pyx_t_2;
int __pyx_t_3;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("insert", 0);
/* "aiohttp/_frozenlist.pyx":62
@ -2583,6 +2640,9 @@ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __
int __pyx_r;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__contains__", 0);
/* "aiohttp/_frozenlist.pyx":66
@ -2639,6 +2699,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__iadd__", 0);
/* "aiohttp/_frozenlist.pyx":69
@ -2729,6 +2792,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __p
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("index", 0);
/* "aiohttp/_frozenlist.pyx":74
@ -2808,6 +2874,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("remove", 0);
/* "aiohttp/_frozenlist.pyx":77
@ -2897,6 +2966,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __p
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("clear", 0);
/* "aiohttp/_frozenlist.pyx":81
@ -2985,6 +3057,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("extend", 0);
/* "aiohttp/_frozenlist.pyx":85
@ -3064,6 +3139,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct _
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("reverse", 0);
/* "aiohttp/_frozenlist.pyx":89
@ -3123,6 +3201,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct _
static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_index = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("pop (wrapper)", 0);
@ -3180,6 +3261,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
Py_ssize_t __pyx_t_2;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("pop", 0);
/* "aiohttp/_frozenlist.pyx":93
@ -3257,6 +3341,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_t_2;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("append", 0);
/* "aiohttp/_frozenlist.pyx":97
@ -3335,6 +3422,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct __p
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("count", 0);
/* "aiohttp/_frozenlist.pyx":101
@ -3417,6 +3507,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
PyObject *__pyx_t_6 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__repr__", 0);
/* "aiohttp/_frozenlist.pyx":104
@ -3538,6 +3631,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(st
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error)
@ -3587,6 +3683,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython_
int __pyx_t_3;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":5
@ -3817,6 +3916,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cytho
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":17
@ -3862,6 +3964,9 @@ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyOb
PyObject *__pyx_v___pyx_type = 0;
long __pyx_v___pyx_checksum;
PyObject *__pyx_v___pyx_state = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList (wrapper)", 0);
@ -3939,6 +4044,9 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHO
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
int __pyx_t_6;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList", 0);
/* "(tree fragment)":4
@ -4127,6 +4235,9 @@ static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_s
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList__set_state", 0);
/* "(tree fragment)":12
@ -4422,7 +4533,12 @@ static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = {
sizeof(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList), /*tp_basicsize*/
0, /*tp_itemsize*/
__pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList, /*tp_dealloc*/
#if PY_VERSION_HEX < 0x030800b4
0, /*tp_print*/
#endif
#if PY_VERSION_HEX >= 0x030800b4
0, /*tp_vectorcall_offset*/
#endif
0, /*tp_getattr*/
0, /*tp_setattr*/
#if PY_MAJOR_VERSION < 3
@ -4475,6 +4591,9 @@ static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = {
#if PY_VERSION_HEX >= 0x030800b1
0, /*tp_vectorcall*/
#endif
#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
0, /*tp_print*/
#endif
};
static PyMethodDef __pyx_methods[] = {
@ -4656,6 +4775,9 @@ static int __Pyx_modinit_function_export_code(void) {
static int __Pyx_modinit_type_init_code(void) {
__Pyx_RefNannyDeclarations
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
/*--- Type init code ---*/
__pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList = &__pyx_vtable_7aiohttp_11_frozenlist_FrozenList;
@ -4704,17 +4826,19 @@ static int __Pyx_modinit_function_import_code(void) {
}
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
@ -4797,6 +4921,9 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__frozenlist(PyObject *__pyx_pyinit
{
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
@ -4885,14 +5012,14 @@ if (!__Pyx_RefNanny) {
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;
if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
(void)__Pyx_modinit_type_import_code();
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
@ -5062,7 +5189,7 @@ static int __Pyx_ParseOptionalKeywords(
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
@ -5089,7 +5216,7 @@ static int __Pyx_ParseOptionalKeywords(
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -5105,7 +5232,7 @@ static int __Pyx_ParseOptionalKeywords(
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -5942,7 +6069,7 @@ static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t
}
if (likely(__Pyx_is_valid_index(cix, size))) {
PyObject* v = PyList_GET_ITEM(L, cix);
Py_SIZE(L) -= 1;
__Pyx_SET_SIZE(L, Py_SIZE(L) - 1);
size -= 1;
memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*));
return v;
@ -6100,7 +6227,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
{
#if PY_MAJOR_VERSION >= 3
if (level == -1) {
if (strchr(__Pyx_MODULE_NAME, '.')) {
if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) {
module = PyImport_ImportModuleLevelObject(
name, global_dict, empty_dict, list, 1);
if (!module) {
@ -6235,6 +6362,28 @@ bad:
return -1;
}
/* PyObjectGetAttrStrNoError */
static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
__Pyx_PyErr_Clear();
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
PyObject *result;
#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
}
#endif
result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
if (unlikely(!result)) {
__Pyx_PyObject_GetAttrStr_ClearAttributeError();
}
return result;
}
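/* Semantically this helper is a getattr() that converts AttributeError into
 * NULL while leaving any other pending exception untouched.  __Pyx_setup_reduce
 * below uses it to probe for __reduce_cython__ / __setstate_cython__, so a
 * merely missing method is distinguished from a real lookup failure; the old
 * generated code fetched with __Pyx_PyObject_GetAttrStr and jumped straight
 * to the error path. */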
/* SetupReduce */
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
int ret;
@ -6262,43 +6411,51 @@ static int __Pyx_setup_reduce(PyObject* type_obj) {
PyObject *setstate = NULL;
PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD;
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD;
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD;
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD;
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD;
reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython);
if (likely(reduce_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (reduce == object_reduce || PyErr_Occurred()) {
goto __PYX_BAD;
}
setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
if (!setstate) PyErr_Clear();
if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD;
setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython);
if (likely(setstate_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (!setstate || PyErr_Occurred()) {
goto __PYX_BAD;
}
}
PyType_Modified((PyTypeObject*)type_obj);
}
}
goto GOOD;
BAD:
goto __PYX_GOOD;
__PYX_BAD:
if (!PyErr_Occurred())
PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
ret = -1;
GOOD:
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
Py_XDECREF(object_reduce);
Py_XDECREF(object_reduce_ex);
@ -6313,7 +6470,7 @@ GOOD:
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
@ -6417,7 +6574,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}

@ -69,15 +69,14 @@ cdef tuple headers = (
hdrs.TE,
hdrs.TRAILER,
hdrs.TRANSFER_ENCODING,
hdrs.UPGRADE,
hdrs.URI,
hdrs.UPGRADE,
hdrs.USER_AGENT,
hdrs.VARY,
hdrs.VIA,
hdrs.WWW_AUTHENTICATE,
hdrs.WANT_DIGEST,
hdrs.WARNING,
hdrs.WEBSOCKET,
hdrs.WWW_AUTHENTICATE,
hdrs.X_FORWARDED_FOR,
hdrs.X_FORWARDED_HOST,
hdrs.X_FORWARDED_PROTO,
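
The order of this tuple is load-bearing: the integer returned by the generated find_header() is an index into it, which is why swapping URI and UPGRADE and dropping WEBSOCKET forced the wholesale renumbering visible in the C diff above. A minimal sketch of the invariant, over an illustrative subset rather than the full table:

# Illustrative subset; the real table lives in _headers.pxi and the real
# matcher is the generated switch/goto machine in _find_header.c.
headers = ("URI", "UPGRADE", "USER-AGENT")

def find_header(name: str) -> int:
    """Return the header's index in `headers`, or -1 if unknown."""
    try:
        return headers.index(name.upper())
    except ValueError:
        return -1

assert find_header("upgrade") == 1
assert find_header("X-UNKNOWN") == -1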

@ -1,4 +1,4 @@
/* Generated by Cython 0.29.13 */
/* Generated by Cython 0.29.21 */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
@ -7,8 +7,8 @@
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_13"
#define CYTHON_HEX_VERSION 0x001D0DF0
#define CYTHON_ABI "0_29_21"
#define CYTHON_HEX_VERSION 0x001D15F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@ -435,7 +435,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
@ -484,8 +488,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
@ -496,6 +502,13 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
@ -535,7 +548,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
@ -576,11 +589,10 @@ static CYTHON_INLINE float __PYX_NAN() {
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
@ -1130,6 +1142,9 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam
#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
#endif
/* PyObjectGetAttrStrNoError.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
@ -1288,6 +1303,9 @@ static PyObject *__pyx_codeobj__3;
static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_7aiohttp_8_helpers_5reify_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_wrapped = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__init__ (wrapper)", 0);
@ -1338,6 +1356,9 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify___init__(struct __pyx_obj_7aiohttp
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__init__", 0);
/* "aiohttp/_helpers.pyx":14
@ -1413,6 +1434,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_7__doc_____get__(struct __py
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__get__", 0);
/* "aiohttp/_helpers.pyx":19
@ -1488,6 +1512,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7a
PyObject *__pyx_t_13 = NULL;
int __pyx_t_14;
int __pyx_t_15;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__get__", 0);
/* "aiohttp/_helpers.pyx":22
@ -1806,6 +1833,9 @@ static int __pyx_pf_7aiohttp_8_helpers_5reify_4__set__(CYTHON_UNUSED struct __py
int __pyx_r;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__set__", 0);
/* "aiohttp/_helpers.pyx":35
@ -1866,6 +1896,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_6__reduce_cython__(struct __
PyObject *__pyx_t_4 = NULL;
int __pyx_t_5;
PyObject *__pyx_t_6 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":5
@ -2104,6 +2137,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_8__setstate_cython__(struct
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":17
@ -2149,6 +2185,9 @@ static PyObject *__pyx_pw_7aiohttp_8_helpers_1__pyx_unpickle_reify(PyObject *__p
PyObject *__pyx_v___pyx_type = 0;
long __pyx_v___pyx_checksum;
PyObject *__pyx_v___pyx_state = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__pyx_unpickle_reify (wrapper)", 0);
@ -2226,6 +2265,9 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
int __pyx_t_6;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_reify", 0);
/* "(tree fragment)":4
@ -2414,6 +2456,9 @@ static PyObject *__pyx_f_7aiohttp_8_helpers___pyx_unpickle_reify__set_state(stru
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__pyx_unpickle_reify__set_state", 0);
/* "(tree fragment)":12
@ -2626,7 +2671,12 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
sizeof(struct __pyx_obj_7aiohttp_8_helpers_reify), /*tp_basicsize*/
0, /*tp_itemsize*/
__pyx_tp_dealloc_7aiohttp_8_helpers_reify, /*tp_dealloc*/
#if PY_VERSION_HEX < 0x030800b4
0, /*tp_print*/
#endif
#if PY_VERSION_HEX >= 0x030800b4
0, /*tp_vectorcall_offset*/
#endif
0, /*tp_getattr*/
0, /*tp_setattr*/
#if PY_MAJOR_VERSION < 3
@ -2679,6 +2729,9 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
#if PY_VERSION_HEX >= 0x030800b1
0, /*tp_vectorcall*/
#endif
#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
0, /*tp_print*/
#endif
};
static PyMethodDef __pyx_methods[] = {
@ -2840,6 +2893,9 @@ static int __Pyx_modinit_function_export_code(void) {
static int __Pyx_modinit_type_init_code(void) {
__Pyx_RefNannyDeclarations
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
/*--- Type init code ---*/
if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
@ -2884,17 +2940,19 @@ static int __Pyx_modinit_function_import_code(void) {
}
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
@ -2976,6 +3034,9 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__helpers(PyObject *__pyx_pyinit_mo
#endif
{
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
@ -3064,14 +3125,14 @@ if (!__Pyx_RefNanny) {
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;
if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
(void)__Pyx_modinit_type_import_code();
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
@ -3206,7 +3267,7 @@ static int __Pyx_ParseOptionalKeywords(
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
@ -3233,7 +3294,7 @@ static int __Pyx_ParseOptionalKeywords(
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -3249,7 +3310,7 @@ static int __Pyx_ParseOptionalKeywords(
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -4136,7 +4197,7 @@ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
{
#if PY_MAJOR_VERSION >= 3
if (level == -1) {
if (strchr(__Pyx_MODULE_NAME, '.')) {
if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) {
module = PyImport_ImportModuleLevelObject(
name, global_dict, empty_dict, list, 1);
if (!module) {
@ -4253,6 +4314,28 @@ static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_nam
}
#endif
/* PyObjectGetAttrStrNoError */
static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
__Pyx_PyErr_Clear();
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
PyObject *result;
#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
}
#endif
result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
if (unlikely(!result)) {
__Pyx_PyObject_GetAttrStr_ClearAttributeError();
}
return result;
}
/* SetupReduce */
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
int ret;
@ -4280,43 +4363,51 @@ static int __Pyx_setup_reduce(PyObject* type_obj) {
PyObject *setstate = NULL;
PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD;
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD;
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD;
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD;
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD;
reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython);
if (likely(reduce_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (reduce == object_reduce || PyErr_Occurred()) {
goto __PYX_BAD;
}
setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
if (!setstate) PyErr_Clear();
if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD;
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD;
setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython);
if (likely(setstate_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (!setstate || PyErr_Occurred()) {
goto __PYX_BAD;
}
}
PyType_Modified((PyTypeObject*)type_obj);
}
}
goto GOOD;
BAD:
goto __PYX_GOOD;
__PYX_BAD:
if (!PyErr_Occurred())
PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
ret = -1;
GOOD:
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
Py_XDECREF(object_reduce);
Py_XDECREF(object_reduce_ex);
@ -4331,7 +4422,7 @@ GOOD:
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
@ -4435,7 +4526,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}

@ -2,7 +2,5 @@ from typing import Any
class reify:
def __init__(self, wrapped: Any) -> None: ...
def __get__(self, inst: Any, owner: Any) -> Any: ...
def __set__(self, inst: Any, value: Any) -> None: ...

File diff suppressed because it is too large

View File

@ -3,27 +3,44 @@
# Based on https://github.com/MagicStack/httptools
#
from __future__ import absolute_import, print_function
from cpython.mem cimport PyMem_Malloc, PyMem_Free
from libc.string cimport memcpy
from cpython cimport (PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE,
Py_buffer, PyBytes_AsString, PyBytes_AsStringAndSize)
from multidict import (CIMultiDict as _CIMultiDict,
CIMultiDictProxy as _CIMultiDictProxy)
from cpython cimport (
Py_buffer,
PyBUF_SIMPLE,
PyBuffer_Release,
PyBytes_AsString,
PyBytes_AsStringAndSize,
PyObject_GetBuffer,
)
from cpython.mem cimport PyMem_Free, PyMem_Malloc
from libc.limits cimport ULLONG_MAX
from libc.string cimport memcpy
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
from yarl import URL as _URL
from aiohttp import hdrs
from .http_exceptions import (
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError,
PayloadEncodingError, ContentLengthError, TransferEncodingError)
from .http_writer import (HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11)
BadHttpMessage,
BadStatusLine,
ContentLengthError,
InvalidHeader,
InvalidURLError,
LineTooLong,
PayloadEncodingError,
TransferEncodingError,
)
from .http_parser import DeflateBuffer as _DeflateBuffer
from .streams import (EMPTY_PAYLOAD as _EMPTY_PAYLOAD,
StreamReader as _StreamReader)
from .http_writer import (
HttpVersion as _HttpVersion,
HttpVersion10 as _HttpVersion10,
HttpVersion11 as _HttpVersion11,
)
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
cimport cython
from aiohttp cimport _cparser as cparser
include "_headers.pxi"
@ -270,6 +287,7 @@ cdef class HttpParser:
size_t _max_field_size
size_t _max_headers
bint _response_with_body
bint _read_until_eof
bint _started
object _url
@ -285,6 +303,7 @@ cdef class HttpParser:
object _payload_exception
object _last_error
bint _auto_decompress
int _limit
str _content_encoding
@ -306,10 +325,12 @@ cdef class HttpParser:
PyMem_Free(self._csettings)
cdef _init(self, cparser.http_parser_type mode,
object protocol, object loop, object timer=None,
object protocol, object loop, int limit,
object timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint auto_decompress=True):
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
cparser.http_parser_init(self._cparser, mode)
self._cparser.data = <void*>self
self._cparser.content_length = 0
@ -334,6 +355,7 @@ cdef class HttpParser:
self._max_headers = max_headers
self._max_field_size = max_field_size
self._response_with_body = response_with_body
self._read_until_eof = read_until_eof
self._upgraded = False
self._auto_decompress = auto_decompress
self._content_encoding = None
@ -350,6 +372,7 @@ cdef class HttpParser:
self._csettings.on_chunk_complete = cb_on_chunk_complete
self._last_error = None
self._limit = limit
cdef _process_header(self):
if self._raw_name:
@ -427,10 +450,15 @@ cdef class HttpParser:
headers, raw_headers, should_close, encoding,
upgrade, chunked)
if (self._cparser.content_length > 0 or chunked or
self._cparser.method == 5): # CONNECT: 5
if (ULLONG_MAX > self._cparser.content_length > 0 or chunked or
self._cparser.method == 5 or # CONNECT: 5
(self._cparser.status_code >= 199 and
self._cparser.content_length == ULLONG_MAX and
self._read_until_eof)
):
payload = StreamReader(
self._protocol, timer=self._timer, loop=self._loop)
self._protocol, timer=self._timer, loop=self._loop,
limit=self._limit)
else:
payload = EMPTY_PAYLOAD
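The condition above decides when a real StreamReader payload is needed instead of EMPTY_PAYLOAD. A hedged standalone restatement (assumption: names are illustrative; ULLONG_MAX is the parser's "no Content-Length seen" sentinel and 5 is llhttp/http-parser's CONNECT method code):
ULLONG_MAX = 2 ** 64 - 1   # content_length when no Content-Length header was seen
METH_CONNECT = 5

def needs_stream_payload(content_length, chunked, method, status, read_until_eof):
    return (
        ULLONG_MAX > content_length > 0            # bounded body announced
        or chunked                                 # chunked transfer encoding
        or method == METH_CONNECT                  # tunneled CONNECT traffic
        or (status >= 199 and content_length == ULLONG_MAX and read_until_eof)
    )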
@ -506,12 +534,7 @@ cdef class HttpParser:
PyBuffer_Release(&self.py_buf)
# I am not sure about cparser.HPE_INVALID_METHOD;
# it seems to raise an error for a valid request:
# test_client_functional.py::test_post_data_with_bytesio_file
if (self._cparser.http_errno != cparser.HPE_OK and
(self._cparser.http_errno != cparser.HPE_INVALID_METHOD or
self._cparser.method == 0)):
if (self._cparser.http_errno != cparser.HPE_OK):
if self._payload_error == 0:
if self._last_error is not None:
ex = self._last_error
@ -533,16 +556,20 @@ cdef class HttpParser:
else:
return messages, False, b''
def set_upgraded(self, val):
self._upgraded = val
cdef class HttpRequestParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False):
self._init(cparser.HTTP_REQUEST, protocol, loop, timer,
bint response_with_body=True, bint read_until_eof=False,
):
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body)
payload_exception, response_with_body, read_until_eof)
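A minimal construction sketch for the reworked signature above (assumption: protocol and loop are supplied by the caller; the new positional limit bounds payload buffering and is threaded through _init to self._limit):
parser = HttpRequestParser(protocol, loop, 2 ** 16, read_until_eof=False)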
cdef object _on_status_complete(self):
cdef Py_buffer py_buf
@ -563,14 +590,16 @@ cdef class HttpRequestParser(HttpParser):
cdef class HttpResponseParser(HttpParser):
def __init__(self, protocol, loop, timer=None,
def __init__(self, protocol, loop, int limit, timer=None,
size_t max_line_size=8190, size_t max_headers=32768,
size_t max_field_size=8190, payload_exception=None,
bint response_with_body=True, bint read_until_eof=False,
bint auto_decompress=True):
self._init(cparser.HTTP_RESPONSE, protocol, loop, timer,
bint auto_decompress=True
):
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, auto_decompress)
payload_exception, response_with_body, read_until_eof,
auto_decompress)
cdef object _on_status_complete(self):
if self._buf:
@ -839,7 +868,7 @@ cdef _parse_url(char* buf_data, size_t length):
return URL_build(scheme=schema,
user=user, password=password, host=host, port=port,
path=path, query=query, fragment=fragment)
path=path, query_string=query, fragment=fragment, encoded=True)
else:
raise InvalidURLError("invalid url {!r}".format(buf_data))
finally:
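The URL_build change above routes the raw query through query_string= and marks the parts encoded=True, so yarl does not re-quote percent-escapes that came off the wire. A hedged illustration with toy values:
from yarl import URL

url = URL.build(scheme="https", host="example.com", path="/a%2Fb",
                query_string="q=1%2B1", encoded=True)
print(url)  # percent-escapes survive untouched: https://example.com/a%2Fb?q=1%2B1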

File diff suppressed because it is too large

View File

@ -1,10 +1,9 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Malloc, PyMem_Realloc, PyMem_Free
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.object cimport PyObject_Str
from multidict import istr

View File

@ -1,4 +1,4 @@
/* Generated by Cython 0.29.13 */
/* Generated by Cython 0.29.21 */
#define PY_SSIZE_T_CLEAN
#include "Python.h"
@ -7,8 +7,8 @@
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_13"
#define CYTHON_HEX_VERSION 0x001D0DF0
#define CYTHON_ABI "0_29_21"
#define CYTHON_HEX_VERSION 0x001D15F0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
@ -435,7 +435,11 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
@ -484,8 +488,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
@ -496,6 +502,13 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
@ -535,7 +548,7 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
#define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
@ -576,11 +589,10 @@ static CYTHON_INLINE float __PYX_NAN() {
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ \
__pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
}
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
@ -1201,8 +1213,8 @@ static PyObject *__pyx_tuple_;
static PyObject *__pyx_codeobj__2;
/* Late includes */
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
@ -1216,6 +1228,9 @@ static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"
static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_mask = 0;
PyObject *__pyx_v_data = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0);
@ -1242,11 +1257,11 @@ static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error)
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 11, __pyx_L3_error)
}
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 9, __pyx_L3_error)
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 11, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
goto __pyx_L5_argtuple_error;
@ -1259,7 +1274,7 @@ static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error)
__Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 11, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
@ -1292,11 +1307,14 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
Py_ssize_t __pyx_t_9;
Py_ssize_t __pyx_t_10;
Py_ssize_t __pyx_t_11;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("_websocket_mask_cython", 0);
__Pyx_INCREF(__pyx_v_mask);
__Pyx_INCREF(__pyx_v_data);
/* "aiohttp/_websocket.pyx":20
/* "aiohttp/_websocket.pyx":22
* uint64_t uint64_msk
*
* assert len(mask) == 4 # <<<<<<<<<<<<<<
@ -1305,15 +1323,15 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
#ifndef CYTHON_WITHOUT_ASSERTIONS
if (unlikely(!Py_OptimizeFlag)) {
__pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 20, __pyx_L1_error)
__pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error)
if (unlikely(!((__pyx_t_1 == 4) != 0))) {
PyErr_SetNone(PyExc_AssertionError);
__PYX_ERR(0, 20, __pyx_L1_error)
__PYX_ERR(0, 22, __pyx_L1_error)
}
}
#endif
/* "aiohttp/_websocket.pyx":22
/* "aiohttp/_websocket.pyx":24
* assert len(mask) == 4
*
* if not isinstance(mask, bytes): # <<<<<<<<<<<<<<
@ -1324,19 +1342,19 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0);
if (__pyx_t_3) {
/* "aiohttp/_websocket.pyx":23
/* "aiohttp/_websocket.pyx":25
*
* if not isinstance(mask, bytes):
* mask = bytes(mask) # <<<<<<<<<<<<<<
*
* if isinstance(data, bytearray):
*/
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error)
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_v_mask); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 25, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_mask, __pyx_t_4);
__pyx_t_4 = 0;
/* "aiohttp/_websocket.pyx":22
/* "aiohttp/_websocket.pyx":24
* assert len(mask) == 4
*
* if not isinstance(mask, bytes): # <<<<<<<<<<<<<<
@ -1345,7 +1363,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
}
/* "aiohttp/_websocket.pyx":25
/* "aiohttp/_websocket.pyx":27
* mask = bytes(mask)
*
* if isinstance(data, bytearray): # <<<<<<<<<<<<<<
@ -1356,7 +1374,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = (__pyx_t_3 != 0);
if (__pyx_t_2) {
/* "aiohttp/_websocket.pyx":26
/* "aiohttp/_websocket.pyx":28
*
* if isinstance(data, bytearray):
* data = <bytearray>data # <<<<<<<<<<<<<<
@ -1368,7 +1386,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4);
__pyx_t_4 = 0;
/* "aiohttp/_websocket.pyx":25
/* "aiohttp/_websocket.pyx":27
* mask = bytes(mask)
*
* if isinstance(data, bytearray): # <<<<<<<<<<<<<<
@ -1378,7 +1396,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
goto __pyx_L4;
}
/* "aiohttp/_websocket.pyx":28
/* "aiohttp/_websocket.pyx":30
* data = <bytearray>data
* else:
* data = bytearray(data) # <<<<<<<<<<<<<<
@ -1386,45 +1404,45 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
* data_len = len(data)
*/
/*else*/ {
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error)
__pyx_t_4 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyByteArray_Type)), __pyx_v_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 30, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4);
__pyx_t_4 = 0;
}
__pyx_L4:;
/* "aiohttp/_websocket.pyx":30
/* "aiohttp/_websocket.pyx":32
* data = bytearray(data)
*
* data_len = len(data) # <<<<<<<<<<<<<<
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
*/
__pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 30, __pyx_L1_error)
__pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 32, __pyx_L1_error)
__pyx_v_data_len = __pyx_t_1;
/* "aiohttp/_websocket.pyx":31
/* "aiohttp/_websocket.pyx":33
*
* data_len = len(data)
* in_buf = <unsigned char*>PyByteArray_AsString(data) # <<<<<<<<<<<<<<
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
* uint32_msk = (<uint32_t*>mask_buf)[0]
*/
if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 31, __pyx_L1_error)
__pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 31, __pyx_L1_error)
if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 33, __pyx_L1_error)
__pyx_t_5 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 33, __pyx_L1_error)
__pyx_v_in_buf = ((unsigned char *)__pyx_t_5);
/* "aiohttp/_websocket.pyx":32
/* "aiohttp/_websocket.pyx":34
* data_len = len(data)
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask) # <<<<<<<<<<<<<<
* uint32_msk = (<uint32_t*>mask_buf)[0]
*
*/
__pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 32, __pyx_L1_error)
__pyx_t_5 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_5 == ((char *)NULL))) __PYX_ERR(0, 34, __pyx_L1_error)
__pyx_v_mask_buf = ((unsigned char const *)__pyx_t_5);
/* "aiohttp/_websocket.pyx":33
/* "aiohttp/_websocket.pyx":35
* in_buf = <unsigned char*>PyByteArray_AsString(data)
* mask_buf = <const unsigned char*>PyBytes_AsString(mask)
* uint32_msk = (<uint32_t*>mask_buf)[0] # <<<<<<<<<<<<<<
@ -1433,7 +1451,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]);
/* "aiohttp/_websocket.pyx":38
/* "aiohttp/_websocket.pyx":40
* # is this needed in Python?! malloc() always aligns to sizeof(long) bytes
*
* if sizeof(size_t) >= 8: # <<<<<<<<<<<<<<
@ -1443,7 +1461,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = (((sizeof(size_t)) >= 8) != 0);
if (__pyx_t_2) {
/* "aiohttp/_websocket.pyx":39
/* "aiohttp/_websocket.pyx":41
*
* if sizeof(size_t) >= 8:
* uint64_msk = uint32_msk # <<<<<<<<<<<<<<
@ -1452,7 +1470,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint64_msk = __pyx_v_uint32_msk;
/* "aiohttp/_websocket.pyx":40
/* "aiohttp/_websocket.pyx":42
* if sizeof(size_t) >= 8:
* uint64_msk = uint32_msk
* uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<<
@ -1461,7 +1479,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk);
/* "aiohttp/_websocket.pyx":42
/* "aiohttp/_websocket.pyx":44
* uint64_msk = (uint64_msk << 32) | uint32_msk
*
* while data_len >= 8: # <<<<<<<<<<<<<<
@ -1472,7 +1490,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = ((__pyx_v_data_len >= 8) != 0);
if (!__pyx_t_2) break;
/* "aiohttp/_websocket.pyx":43
/* "aiohttp/_websocket.pyx":45
*
* while data_len >= 8:
* (<uint64_t*>in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<<
@ -1483,7 +1501,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_7 = 0;
(__pyx_t_6[__pyx_t_7]) = ((__pyx_t_6[__pyx_t_7]) ^ __pyx_v_uint64_msk);
/* "aiohttp/_websocket.pyx":44
/* "aiohttp/_websocket.pyx":46
* while data_len >= 8:
* (<uint64_t*>in_buf)[0] ^= uint64_msk
* in_buf += 8 # <<<<<<<<<<<<<<
@ -1492,7 +1510,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_in_buf = (__pyx_v_in_buf + 8);
/* "aiohttp/_websocket.pyx":45
/* "aiohttp/_websocket.pyx":47
* (<uint64_t*>in_buf)[0] ^= uint64_msk
* in_buf += 8
* data_len -= 8 # <<<<<<<<<<<<<<
@ -1502,7 +1520,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_v_data_len = (__pyx_v_data_len - 8);
}
/* "aiohttp/_websocket.pyx":38
/* "aiohttp/_websocket.pyx":40
* # is this needed in Python?! malloc() always aligns to sizeof(long) bytes
*
* if sizeof(size_t) >= 8: # <<<<<<<<<<<<<<
@ -1511,7 +1529,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
}
/* "aiohttp/_websocket.pyx":48
/* "aiohttp/_websocket.pyx":50
*
*
* while data_len >= 4: # <<<<<<<<<<<<<<
@ -1522,7 +1540,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_2 = ((__pyx_v_data_len >= 4) != 0);
if (!__pyx_t_2) break;
/* "aiohttp/_websocket.pyx":49
/* "aiohttp/_websocket.pyx":51
*
* while data_len >= 4:
* (<uint32_t*>in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<<
@ -1533,7 +1551,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_t_7 = 0;
(__pyx_t_8[__pyx_t_7]) = ((__pyx_t_8[__pyx_t_7]) ^ __pyx_v_uint32_msk);
/* "aiohttp/_websocket.pyx":50
/* "aiohttp/_websocket.pyx":52
* while data_len >= 4:
* (<uint32_t*>in_buf)[0] ^= uint32_msk
* in_buf += 4 # <<<<<<<<<<<<<<
@ -1542,7 +1560,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
*/
__pyx_v_in_buf = (__pyx_v_in_buf + 4);
/* "aiohttp/_websocket.pyx":51
/* "aiohttp/_websocket.pyx":53
* (<uint32_t*>in_buf)[0] ^= uint32_msk
* in_buf += 4
* data_len -= 4 # <<<<<<<<<<<<<<
@ -1552,7 +1570,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
__pyx_v_data_len = (__pyx_v_data_len - 4);
}
/* "aiohttp/_websocket.pyx":53
/* "aiohttp/_websocket.pyx":55
* data_len -= 4
*
* for i in range(0, data_len): # <<<<<<<<<<<<<<
@ -1563,7 +1581,7 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_9; __pyx_t_10+=1) {
__pyx_v_i = __pyx_t_10;
/* "aiohttp/_websocket.pyx":54
/* "aiohttp/_websocket.pyx":56
*
* for i in range(0, data_len):
* in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<<
@ -1572,8 +1590,8 @@ static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UN
(__pyx_v_in_buf[__pyx_t_11]) = ((__pyx_v_in_buf[__pyx_t_11]) ^ (__pyx_v_mask_buf[__pyx_v_i]));
}
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
@ -1660,7 +1678,7 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
{0, 0, 0, 0, 0, 0, 0}
};
static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 53, __pyx_L1_error)
__pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 55, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
@ -1670,17 +1688,17 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
* """
*/
__pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple_);
__Pyx_GIVEREF(__pyx_tuple_);
__pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 11, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
@ -1738,6 +1756,9 @@ static int __Pyx_modinit_type_init_code(void) {
static int __Pyx_modinit_type_import_code(void) {
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
/*--- Type import code ---*/
__pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 9, __pyx_L1_error)
@ -1786,17 +1807,19 @@ static int __Pyx_modinit_function_import_code(void) {
}
#if PY_MAJOR_VERSION < 3
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC void
#else
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyObject *
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
@ -1878,6 +1901,9 @@ static CYTHON_SMALL_CODE int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_
#endif
{
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
@ -1966,15 +1992,15 @@ if (!__Pyx_RefNanny) {
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
(void)__Pyx_modinit_type_init_code();
if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error;
if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
/*--- Execution code ---*/
@ -1982,22 +2008,22 @@ if (!__Pyx_RefNanny) {
if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
#endif
/* "aiohttp/_websocket.pyx":9
* from libc.stdint cimport uint32_t, uint64_t, uintmax_t
/* "aiohttp/_websocket.pyx":11
*
*
* def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<<
* """Note, this function mutates its `data` argument
* """
*/
__pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error)
__pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error)
if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "aiohttp/_websocket.pyx":1
* from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<<
*
* #from cpython cimport PyByteArray_AsString # cython still does not export that
*
*/
__pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
@ -2136,7 +2162,7 @@ static int __Pyx_ParseOptionalKeywords(
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
@ -2163,7 +2189,7 @@ static int __Pyx_ParseOptionalKeywords(
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -2179,7 +2205,7 @@ static int __Pyx_ParseOptionalKeywords(
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
@ -2551,7 +2577,7 @@ static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject
/* CLineInTraceback */
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
@ -2655,7 +2681,7 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}

View File

@ -1,11 +1,13 @@
from cpython cimport PyBytes_AsString
#from cpython cimport PyByteArray_AsString # cython still does not export that
cdef extern from "Python.h":
char* PyByteArray_AsString(bytearray ba) except NULL
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
def _websocket_mask_cython(object mask, object data):
"""Note, this function mutates its `data` argument
"""

View File

@ -2,7 +2,7 @@ import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel # noqa
from http.cookies import BaseCookie, Morsel
from typing import (
TYPE_CHECKING,
Any,
@ -16,24 +16,23 @@ from typing import (
Tuple,
)
from multidict import CIMultiDict # noqa
from multidict import CIMultiDict
from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
from .web_app import Application
from .web_exceptions import HTTPException
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
else:
BaseRequest = Request = Application = StreamResponse = None
HTTPException = None
class AbstractRouter(ABC):
def __init__(self) -> None:
self._frozen = False
@ -54,12 +53,11 @@ class AbstractRouter(ABC):
self._frozen = True
@abstractmethod
async def resolve(self, request: Request) -> 'AbstractMatchInfo':
async def resolve(self, request: Request) -> "AbstractMatchInfo":
"""Return MATCH_INFO for given request"""
class AbstractMatchInfo(ABC):
@property # pragma: no branch
@abstractmethod
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
@ -123,8 +121,7 @@ class AbstractResolver(ABC):
"""Abstract DNS resolver."""
@abstractmethod
async def resolve(self, host: str,
port: int, family: int) -> List[Dict[str, Any]]:
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
"""Return IP address for given hostname"""
@abstractmethod
@ -141,8 +138,7 @@ else:
class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
@abstractmethod
@ -150,13 +146,11 @@ class AbstractCookieJar(Sized, IterableBase):
"""Clear all cookies."""
@abstractmethod
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
@abstractmethod
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
"""Return the jar's cookies filtered by their attributes."""
@ -172,7 +166,7 @@ class AbstractStreamWriter(ABC):
"""Write chunk into stream."""
@abstractmethod
async def write_eof(self, chunk: bytes=b'') -> None:
async def write_eof(self, chunk: bytes = b"") -> None:
"""Write last chunk."""
@abstractmethod
@ -180,7 +174,7 @@ class AbstractStreamWriter(ABC):
"""Flush the write buffer."""
@abstractmethod
def enable_compression(self, encoding: str='deflate') -> None:
def enable_compression(self, encoding: str = "deflate") -> None:
"""Enable HTTP body compression"""
@abstractmethod
@ -188,8 +182,9 @@ class AbstractStreamWriter(ABC):
"""Enable HTTP chunked mode"""
@abstractmethod
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write HTTP headers"""
@ -201,8 +196,5 @@ class AbstractAccessLogger(ABC):
self.log_format = log_format
@abstractmethod
def log(self,
request: BaseRequest,
response: StreamResponse,
time: float) -> None:
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
"""Emit log to logger."""

View File

@ -5,8 +5,14 @@ from .tcp_helpers import tcp_nodelay
class BaseProtocol(asyncio.Protocol):
__slots__ = ('_loop', '_paused', '_drain_waiter',
'_connection_lost', '_reading_paused', 'transport')
__slots__ = (
"_loop",
"_paused",
"_drain_waiter",
"_connection_lost",
"_reading_paused",
"transport",
)
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop # type: asyncio.AbstractEventLoop
@ -71,7 +77,7 @@ class BaseProtocol(asyncio.Protocol):
async def _drain_helper(self) -> None:
if self._connection_lost:
raise ConnectionResetError('Connection lost')
raise ConnectionResetError("Connection lost")
if not self._paused:
return
waiter = self._drain_waiter

File diff suppressed because it is too large

View File

@ -4,38 +4,41 @@ import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
from .typedefs import _CIMultiDict
from .typedefs import LooseHeaders
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import (RequestInfo, ClientResponse, ConnectionKey, # noqa
Fingerprint)
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
RequestInfo = ClientResponse = ConnectionKey = None
__all__ = (
'ClientError',
'ClientConnectionError',
'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError',
'ClientSSLError',
'ClientConnectorSSLError', 'ClientConnectorCertificateError',
'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError',
'ServerFingerprintMismatch',
'ClientResponseError', 'ClientHttpProxyError',
'WSServerHandshakeError', 'ContentTypeError',
'ClientPayloadError', 'InvalidURL')
"ClientError",
"ClientConnectionError",
"ClientOSError",
"ClientConnectorError",
"ClientProxyConnectionError",
"ClientSSLError",
"ClientConnectorSSLError",
"ClientConnectorCertificateError",
"ServerConnectionError",
"ServerTimeoutError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ClientResponseError",
"ClientHttpProxyError",
"WSServerHandshakeError",
"ContentTypeError",
"ClientPayloadError",
"InvalidURL",
)
class ClientError(Exception):
@ -48,21 +51,28 @@ class ClientResponseError(ClientError):
request_info: instance of RequestInfo
"""
def __init__(self, request_info: RequestInfo,
history: Tuple[ClientResponse, ...], *,
code: Optional[int]=None,
status: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(
self,
request_info: RequestInfo,
history: Tuple[ClientResponse, ...],
*,
code: Optional[int] = None,
status: Optional[int] = None,
message: str = "",
headers: Optional[LooseHeaders] = None,
) -> None:
self.request_info = request_info
if code is not None:
if status is not None:
raise ValueError(
"Both code and status arguments are provided; "
"code is deprecated, use status instead")
warnings.warn("code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
"code is deprecated, use status instead"
)
warnings.warn(
"code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
if status is not None:
self.status = status
elif code is not None:
@ -75,31 +85,38 @@ class ClientResponseError(ClientError):
self.args = (request_info, history)
def __str__(self) -> str:
return ("%s, message=%r, url=%r" %
(self.status, self.message, self.request_info.real_url))
return "{}, message={!r}, url={!r}".format(
self.status,
self.message,
self.request_info.real_url,
)
def __repr__(self) -> str:
args = "%r, %r" % (self.request_info, self.history)
args = f"{self.request_info!r}, {self.history!r}"
if self.status != 0:
args += ", status=%r" % (self.status,)
if self.message != '':
args += ", message=%r" % (self.message,)
args += f", status={self.status!r}"
if self.message != "":
args += f", message={self.message!r}"
if self.headers is not None:
args += ", headers=%r" % (self.headers,)
return "%s(%s)" % (type(self).__name__, args)
args += f", headers={self.headers!r}"
return "{}({})".format(type(self).__name__, args)
@property
def code(self) -> int:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
return self.status
@code.setter
def code(self, value: int) -> None:
warnings.warn("code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
self.status = value
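A hedged usage note for the reworked constructor (assumption: request_info is a real RequestInfo in practice): prefer status=; passing both code= and status= raises ValueError, while code alone merely emits a DeprecationWarning.
# request_info: a RequestInfo for the failed request (assumed available)
err = ClientResponseError(request_info, history=(), status=404, message="Not Found")
assert err.status == 404   # err.code still works but warns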
@ -138,8 +155,8 @@ class ClientConnectorError(ClientOSError):
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
def __init__(self, connection_key: ConnectionKey,
os_error: OSError) -> None:
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
self._conn_key = connection_key
self._os_error = os_error
super().__init__(os_error.errno, os_error.strerror)
@ -158,13 +175,13 @@ class ClientConnectorError(ClientOSError):
return self._conn_key.port
@property
def ssl(self) -> Union[SSLContext, None, bool, 'Fingerprint']:
def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
return self._conn_key.ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
.format(self, self.ssl if self.ssl is not None else 'default',
self.strerror))
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
self, self.ssl if self.ssl is not None else "default", self.strerror
)
# OSError.__reduce__ does too much black magic
__reduce__ = BaseException.__reduce__
@ -185,12 +202,12 @@ class ServerConnectionError(ClientConnectionError):
class ServerDisconnectedError(ServerConnectionError):
"""Server disconnected."""
def __init__(self, message: Optional[str]=None) -> None:
self.message = message
def __init__(self, message: Optional[str] = None) -> None:
if message is None:
self.args = ()
else:
self.args = (message,)
message = "Server disconnected"
self.args = (message,)
self.message = message
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
@ -200,8 +217,7 @@ class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
class ServerFingerprintMismatch(ServerConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected: bytes, got: bytes,
host: str, port: int) -> None:
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
self.expected = expected
self.got = got
self.host = host
@ -209,9 +225,9 @@ class ServerFingerprintMismatch(ServerConnectionError):
self.args = (expected, got, host, port)
def __repr__(self) -> str:
return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
self.__class__.__name__, self.expected, self.got,
self.host, self.port)
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
self.__class__.__name__, self.expected, self.got, self.host, self.port
)
class ClientPayloadError(ClientError):
@ -236,7 +252,7 @@ class InvalidURL(ClientError, ValueError):
return self.args[0]
def __repr__(self) -> str:
return '<{} {}>'.format(self.__class__.__name__, self.url)
return f"<{self.__class__.__name__} {self.url}>"
class ClientSSLError(ClientConnectorError):
@ -245,13 +261,19 @@ class ClientSSLError(ClientConnectorError):
if ssl is not None:
cert_errors = (ssl.CertificateError,)
cert_errors_bases = (ClientSSLError, ssl.CertificateError,)
cert_errors_bases = (
ClientSSLError,
ssl.CertificateError,
)
ssl_errors = (ssl.SSLError,)
ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
cert_errors = tuple()
cert_errors_bases = (ClientSSLError, ValueError,)
cert_errors_bases = (
ClientSSLError,
ValueError,
)
ssl_errors = tuple()
ssl_error_bases = (ClientSSLError,)
@ -264,8 +286,9 @@ class ClientConnectorSSLError(*ssl_error_bases): # type: ignore
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
"""Response certificate error."""
def __init__(self, connection_key:
ConnectionKey, certificate_error: Exception) -> None:
def __init__(
self, connection_key: ConnectionKey, certificate_error: Exception
) -> None:
self._conn_key = connection_key
self._certificate_error = certificate_error
self.args = (connection_key, certificate_error)
@ -287,6 +310,8 @@ class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore
return self._conn_key.is_ssl
def __str__(self) -> str:
return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} '
'[{0.certificate_error.__class__.__name__}: '
'{0.certificate_error.args}]'.format(self))
return (
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
"[{0.certificate_error.__class__.__name__}: "
"{0.certificate_error.args}]".format(self)
)

View File

@ -14,12 +14,10 @@ from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
class ResponseHandler(BaseProtocol,
DataQueue[Tuple[RawResponseMessage, StreamReader]]):
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self,
loop: asyncio.AbstractEventLoop) -> None:
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
BaseProtocol.__init__(self, loop=loop)
DataQueue.__init__(self, loop)
@ -31,7 +29,7 @@ class ResponseHandler(BaseProtocol,
self._timer = None
self._tail = b''
self._tail = b""
self._upgraded = False
self._parser = None # type: Optional[HttpResponseParser]
@ -44,14 +42,17 @@ class ResponseHandler(BaseProtocol,
@property
def should_close(self) -> bool:
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
if self._payload is not None and not self._payload.is_eof() or self._upgraded:
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) > 0 or bool(self._tail))
return (
self._should_close
or self._upgraded
or self.exception() is not None
or self._payload_parser is not None
or len(self) > 0
or bool(self._tail)
)
def force_close(self) -> None:
self._should_close = True
@ -65,7 +66,7 @@ class ResponseHandler(BaseProtocol,
self._drop_timeout()
def is_connected(self) -> bool:
return self.transport is not None
return self.transport is not None and not self.transport.is_closing()
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._drop_timeout()
@ -81,8 +82,8 @@ class ResponseHandler(BaseProtocol,
except Exception:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError(
'Response payload is not completed'))
ClientPayloadError("Response payload is not completed")
)
if not self.is_eof():
if isinstance(exc, OSError):
@ -130,27 +131,37 @@ class ResponseHandler(BaseProtocol,
self._drop_timeout()
if self._tail:
data, self._tail = self._tail, b''
data, self._tail = self._tail, b""
self.data_received(data)
def set_response_params(self, *, timer: BaseTimerContext=None,
skip_payload: bool=False,
read_until_eof: bool=False,
auto_decompress: bool=True,
read_timeout: Optional[float]=None) -> None:
def set_response_params(
self,
*,
timer: Optional[BaseTimerContext] = None,
skip_payload: bool = False,
read_until_eof: bool = False,
auto_decompress: bool = True,
read_timeout: Optional[float] = None,
read_bufsize: int = 2 ** 16
) -> None:
self._skip_payload = skip_payload
self._read_timeout = read_timeout
self._reschedule_timeout()
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
self,
self._loop,
read_bufsize,
timer=timer,
payload_exception=ClientPayloadError,
response_with_body=not skip_payload,
read_until_eof=read_until_eof,
auto_decompress=auto_decompress)
auto_decompress=auto_decompress,
)
if self._tail:
data, self._tail = self._tail, b''
data, self._tail = self._tail, b""
self.data_received(data)
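A hedged call sketch (assumption: handler is a connected ResponseHandler; timer and payload settings elided). The new keyword-only read_bufsize, defaulting to 2 ** 16 above, is forwarded to HttpResponseParser as its positional limit and caps StreamReader buffering:
handler.set_response_params(
    read_until_eof=True,    # stream bodies that lack Content-Length
    read_bufsize=2 ** 16,   # 64 KiB buffer cap
)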
def _drop_timeout(self) -> None:
@ -165,7 +176,8 @@ class ResponseHandler(BaseProtocol,
if timeout:
self._read_timeout_handle = self._loop.call_later(
timeout, self._on_read_timeout)
timeout, self._on_read_timeout
)
else:
self._read_timeout_handle = None
@ -219,7 +231,7 @@ class ResponseHandler(BaseProtocol,
self._payload = payload
if self._skip_payload or message.code in (204, 304):
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore # noqa
self.feed_data((message, EMPTY_PAYLOAD), 0) # type: ignore
else:
self.feed_data((message, payload), 0)
if payload is not None:

View File

@ -1,5 +1,6 @@
import asyncio
import codecs
import functools
import io
import re
import sys
@ -8,7 +9,7 @@ import warnings
from hashlib import md5, sha1, sha256
from http.cookies import CookieError, Morsel, SimpleCookie
from types import MappingProxyType, TracebackType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
Dict,
@ -37,7 +38,7 @@ from .client_exceptions import (
ServerFingerprintMismatch,
)
from .formdata import FormData
from .helpers import ( # noqa
from .helpers import (
PY_36,
BaseTimerContext,
BasicAuth,
@ -49,7 +50,7 @@ from .helpers import ( # noqa
)
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
from .log import client_logger
from .streams import StreamReader # noqa
from .streams import StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
JSONDecoder,
@ -68,34 +69,34 @@ except ImportError: # pragma: no cover
try:
import cchardet as chardet
except ImportError: # pragma: no cover
import chardet
import chardet # type: ignore
__all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint')
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
if TYPE_CHECKING: # pragma: no cover
from .client import ClientSession # noqa
from .connector import Connection # noqa
from .tracing import Trace # noqa
from .client import ClientSession
from .connector import Connection
from .tracing import Trace
json_re = re.compile(r'^application/(?:[\w.+-]+?\+)?json')
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
type = attr.ib(type=str) # type: Optional[str]
parameters = attr.ib(type=MappingProxyType) # type: MappingProxyType[str, str] # noqa
filename = attr.ib(type=str) # type: Optional[str]
type: Optional[str]
parameters: "MappingProxyType[str, str]"
filename: Optional[str]
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
url = attr.ib(type=URL)
method = attr.ib(type=str)
headers = attr.ib(type=CIMultiDictProxy) # type: CIMultiDictProxy[str]
real_url = attr.ib(type=URL)
url: URL
method: str
headers: "CIMultiDictProxy[str]"
real_url: URL = attr.ib()
@real_url.default
def real_url_default(self) -> URL:
@ -113,10 +114,11 @@ class Fingerprint:
digestlen = len(fingerprint)
hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
if not hashfunc:
raise ValueError('fingerprint has invalid length')
raise ValueError("fingerprint has invalid length")
elif hashfunc is md5 or hashfunc is sha1:
raise ValueError('md5 and sha1 are insecure and '
'not supported. Use sha256.')
raise ValueError(
"md5 and sha1 are insecure and " "not supported. Use sha256."
)
self._hashfunc = hashfunc
self._fingerprint = fingerprint
@ -125,15 +127,14 @@ class Fingerprint:
return self._fingerprint
def check(self, transport: asyncio.Transport) -> None:
if not transport.get_extra_info('sslcontext'):
if not transport.get_extra_info("sslcontext"):
return
sslobj = transport.get_extra_info('ssl_object')
sslobj = transport.get_extra_info("ssl_object")
cert = sslobj.getpeercert(binary_form=True)
got = self._hashfunc(cert).digest()
if got != self._fingerprint:
host, port, *_ = transport.get_extra_info('peername')
raise ServerFingerprintMismatch(self._fingerprint,
got, host, port)
host, port, *_ = transport.get_extra_info("peername")
raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
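How a matching sha256 fingerprint can be produced from a DER-encoded peer certificate (a hedged sketch that mirrors, not reuses, the check above; cert_der is assumed to come from getpeercert(binary_form=True)):
import hashlib

def sha256_fingerprint(cert_der: bytes) -> bytes:
    # digest() returns the raw bytes that Fingerprint compares against.
    return hashlib.sha256(cert_der).digest()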
if ssl is not None:
@ -143,61 +144,75 @@ else: # pragma: no cover
def _merge_ssl_params(
ssl: Union['SSLContext', bool, Fingerprint, None],
verify_ssl: Optional[bool],
ssl_context: Optional['SSLContext'],
fingerprint: Optional[bytes]
) -> Union['SSLContext', bool, Fingerprint, None]:
ssl: Union["SSLContext", bool, Fingerprint, None],
verify_ssl: Optional[bool],
ssl_context: Optional["SSLContext"],
fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint, None]:
if verify_ssl is not None and not verify_ssl:
warnings.warn("verify_ssl is deprecated, use ssl=False instead",
DeprecationWarning,
stacklevel=3)
warnings.warn(
"verify_ssl is deprecated, use ssl=False instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError("verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive")
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = False
if ssl_context is not None:
warnings.warn("ssl_context is deprecated, use ssl=context instead",
DeprecationWarning,
stacklevel=3)
warnings.warn(
"ssl_context is deprecated, use ssl=context instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError("verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive")
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = ssl_context
if fingerprint is not None:
warnings.warn("fingerprint is deprecated, "
"use ssl=Fingerprint(fingerprint) instead",
DeprecationWarning,
stacklevel=3)
warnings.warn(
"fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError("verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive")
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = Fingerprint(fingerprint)
if not isinstance(ssl, SSL_ALLOWED_TYPES):
raise TypeError("ssl should be SSLContext, bool, Fingerprint or None, "
"got {!r} instead.".format(ssl))
raise TypeError(
"ssl should be SSLContext, bool, Fingerprint or None, "
"got {!r} instead.".format(ssl)
)
return ssl
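An illustrative call with simplified arguments: each deprecated parameter is folded into the single ssl value, which is why combining any of them with ssl= raises ValueError.
# Emits a DeprecationWarning for verify_ssl, then collapses it to ssl=False.
merged = _merge_ssl_params(None, verify_ssl=False, ssl_context=None, fingerprint=None)
assert merged is False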
@attr.s(slots=True, frozen=True)
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
# the key should contain an information about used proxy / TLS
# to prevent reusing wrong connections from a pool
host = attr.ib(type=str)
port = attr.ib(type=int) # type: Optional[int]
is_ssl = attr.ib(type=bool)
ssl = attr.ib() # type: Union[SSLContext, None, bool, Fingerprint]
proxy = attr.ib() # type: Optional[URL]
proxy_auth = attr.ib() # type: Optional[BasicAuth]
proxy_headers_hash = attr.ib(type=int) # type: Optional[int] # noqa # hash(CIMultiDict)
host: str
port: Optional[int]
is_ssl: bool
ssl: Union[SSLContext, None, bool, Fingerprint]
proxy: Optional[URL]
proxy_auth: Optional[BasicAuth]
proxy_headers_hash: Optional[int] # hash(CIMultiDict)
def _is_expected_content_type(response_content_type: str,
expected_content_type: str) -> bool:
if expected_content_type == 'application/json':
def _is_expected_content_type(
response_content_type: str, expected_content_type: str
) -> bool:
if expected_content_type == "application/json":
return json_re.match(response_content_type) is not None
return expected_content_type in response_content_type
@ -213,11 +228,11 @@ class ClientRequest:
ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
DEFAULT_HEADERS = {
hdrs.ACCEPT: '*/*',
hdrs.ACCEPT_ENCODING: 'gzip, deflate',
hdrs.ACCEPT: "*/*",
hdrs.ACCEPT_ENCODING: "gzip, deflate",
}
body = b''
body = b""
auth = None
response = None
@ -229,26 +244,31 @@ class ClientRequest:
# because _writer is instance method, thus it keeps a reference to self.
# Until writer has finished finalizer will not be called.
def __init__(self, method: str, url: URL, *,
params: Optional[Mapping[str, str]]=None,
headers: Optional[LooseHeaders]=None,
skip_auto_headers: Iterable[str]=frozenset(),
data: Any=None,
cookies: Optional[LooseCookies]=None,
auth: Optional[BasicAuth]=None,
version: http.HttpVersion=http.HttpVersion11,
compress: Optional[str]=None,
chunked: Optional[bool]=None,
expect100: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None,
response_class: Optional[Type['ClientResponse']]=None,
proxy: Optional[URL]=None,
proxy_auth: Optional[BasicAuth]=None,
timer: Optional[BaseTimerContext]=None,
session: Optional['ClientSession']=None,
ssl: Union[SSLContext, bool, Fingerprint, None]=None,
proxy_headers: Optional[LooseHeaders]=None,
traces: Optional[List['Trace']]=None):
def __init__(
self,
method: str,
url: URL,
*,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Iterable[str] = frozenset(),
data: Any = None,
cookies: Optional[LooseCookies] = None,
auth: Optional[BasicAuth] = None,
version: http.HttpVersion = http.HttpVersion11,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
response_class: Optional[Type["ClientResponse"]] = None,
proxy: Optional[URL] = None,
proxy_auth: Optional[BasicAuth] = None,
timer: Optional[BaseTimerContext] = None,
session: Optional["ClientSession"] = None,
ssl: Union[SSLContext, bool, Fingerprint, None] = None,
proxy_headers: Optional[LooseHeaders] = None,
traces: Optional[List["Trace"]] = None,
):
if loop is None:
loop = asyncio.get_event_loop()
@ -257,7 +277,7 @@ class ClientRequest:
assert isinstance(proxy, (URL, type(None))), proxy
# FIXME: session is None in tests only, need to fix tests
# assert session is not None
self._session = cast('ClientSession', session)
self._session = cast("ClientSession", session)
if params:
q = MultiDict(url.query)
url2 = url.with_query(params)
@ -299,26 +319,34 @@ class ClientRequest:
self._traces = traces
def is_ssl(self) -> bool:
return self.url.scheme in ('https', 'wss')
return self.url.scheme in ("https", "wss")
@property
def ssl(self) -> Union['SSLContext', None, bool, Fingerprint]:
def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
return self._ssl
@property
def connection_key(self) -> ConnectionKey:
proxy_headers = self.proxy_headers
if proxy_headers:
h = hash(tuple((k, v) for k, v in proxy_headers.items())) # type: Optional[int] # noqa
h = hash(
tuple((k, v) for k, v in proxy_headers.items())
) # type: Optional[int]
else:
h = None
return ConnectionKey(self.host, self.port, self.is_ssl(),
self.ssl,
self.proxy, self.proxy_auth, h)
return ConnectionKey(
self.host,
self.port,
self.is_ssl(),
self.ssl,
self.proxy,
self.proxy_auth,
h,
)
@property
def host(self) -> str:
ret = self.url.host
ret = self.url.raw_host
assert ret is not None
return ret
@ -329,19 +357,18 @@ class ClientRequest:
@property
def request_info(self) -> RequestInfo:
headers = CIMultiDictProxy(self.headers) # type: CIMultiDictProxy[str]
return RequestInfo(self.url, self.method,
headers, self.original_url)
return RequestInfo(self.url, self.method, headers, self.original_url)
def update_host(self, url: URL) -> None:
"""Update destination host, port and connection type (ssl)."""
# get host/port
if not url.host:
if not url.raw_host:
raise InvalidURL(url)
# basic auth info
username, password = url.user, url.password
if username:
self.auth = helpers.BasicAuth(username, password or '')
self.auth = helpers.BasicAuth(username, password or "")
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
"""Convert request version to two elements tuple.
@ -349,13 +376,13 @@ class ClientRequest:
parse HTTP version '1.1' => (1, 1)
"""
if isinstance(version, str):
v = [l.strip() for l in version.split('.', 1)]
v = [part.strip() for part in version.split(".", 1)]
try:
version = http.HttpVersion(int(v[0]), int(v[1]))
except ValueError:
raise ValueError(
'Can not parse http version number: {}'
.format(version)) from None
f"Can not parse http version number: {version}"
) from None
self.version = version
def update_headers(self, headers: Optional[LooseHeaders]) -> None:
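The update_version() hunk above accepts either an HttpVersion or a string; here is a minimal self-contained sketch of the string path, assuming HttpVersion behaves like the (major, minor) named tuple aiohttp uses:

    from collections import namedtuple

    # Stand-in for aiohttp.http.HttpVersion (a (major, minor) named tuple).
    HttpVersion = namedtuple("HttpVersion", ["major", "minor"])

    def parse_version(version: str) -> HttpVersion:
        parts = [part.strip() for part in version.split(".", 1)]
        try:
            return HttpVersion(int(parts[0]), int(parts[1]))
        except (ValueError, IndexError):
            # IndexError is also caught here for strings like "1";
            # the hunk above only catches ValueError.
            raise ValueError(f"Can not parse http version number: {version}") from None

    assert parse_version("1.1") == HttpVersion(1, 1)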
@ -365,25 +392,26 @@ class ClientRequest:
# add host
netloc = cast(str, self.url.raw_host)
if helpers.is_ipv6_address(netloc):
netloc = '[{}]'.format(netloc)
netloc = f"[{netloc}]"
if self.url.port is not None and not self.url.is_default_port():
netloc += ':' + str(self.url.port)
netloc += ":" + str(self.url.port)
self.headers[hdrs.HOST] = netloc
if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items() # type: ignore
for key, value in headers:
for key, value in headers: # type: ignore
# A special case for Host header
if key.lower() == 'host':
if key.lower() == "host":
self.headers[key] = value
else:
self.headers.add(key, value)
def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
self.skip_auto_headers = CIMultiDict(
(hdr, None) for hdr in sorted(skip_auto_headers))
(hdr, None) for hdr in sorted(skip_auto_headers)
)
used_headers = self.headers.copy()
used_headers.extend(self.skip_auto_headers) # type: ignore
@ -399,9 +427,9 @@ class ClientRequest:
if not cookies:
return
c = SimpleCookie()
c = SimpleCookie() # type: SimpleCookie[str]
if hdrs.COOKIE in self.headers:
c.load(self.headers.get(hdrs.COOKIE, ''))
c.load(self.headers.get(hdrs.COOKIE, ""))
del self.headers[hdrs.COOKIE]
if isinstance(cookies, Mapping):
@ -417,42 +445,43 @@ class ClientRequest:
else:
c[name] = value # type: ignore
self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
def update_content_encoding(self, data: Any) -> None:
"""Set request content encoding."""
if not data:
return
enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
if enc:
if self.compress:
raise ValueError(
'compress can not be set '
'if Content-Encoding header is set')
"compress can not be set " "if Content-Encoding header is set"
)
elif self.compress:
if not isinstance(self.compress, str):
self.compress = 'deflate'
self.compress = "deflate"
self.headers[hdrs.CONTENT_ENCODING] = self.compress
self.chunked = True # enable chunked, no need to deal with length
def update_transfer_encoding(self) -> None:
"""Analyze transfer-encoding header."""
te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()
te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
if 'chunked' in te:
if "chunked" in te:
if self.chunked:
raise ValueError(
'chunked can not be set '
'if "Transfer-Encoding: chunked" header is set')
"chunked can not be set "
'if "Transfer-Encoding: chunked" header is set'
)
elif self.chunked:
if hdrs.CONTENT_LENGTH in self.headers:
raise ValueError(
'chunked can not be set '
'if Content-Length header is set')
"chunked can not be set " "if Content-Length header is set"
)
self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
@ -465,7 +494,7 @@ class ClientRequest:
return
if not isinstance(auth, helpers.BasicAuth):
raise TypeError('BasicAuth() tuple is required instead')
raise TypeError("BasicAuth() tuple is required instead")
self.headers[hdrs.AUTHORIZATION] = auth.encode()
@ -503,19 +532,22 @@ class ClientRequest:
continue
self.headers[key] = value
def update_expect_continue(self, expect: bool=False) -> None:
def update_expect_continue(self, expect: bool = False) -> None:
if expect:
self.headers[hdrs.EXPECT] = '100-continue'
elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue':
self.headers[hdrs.EXPECT] = "100-continue"
elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
expect = True
if expect:
self._continue = self.loop.create_future()
def update_proxy(self, proxy: Optional[URL],
proxy_auth: Optional[BasicAuth],
proxy_headers: Optional[LooseHeaders]) -> None:
if proxy and not proxy.scheme == 'http':
def update_proxy(
self,
proxy: Optional[URL],
proxy_auth: Optional[BasicAuth],
proxy_headers: Optional[LooseHeaders],
) -> None:
if proxy and not proxy.scheme == "http":
raise ValueError("Only http proxies are supported")
if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
raise ValueError("proxy_auth must be None or BasicAuth() tuple")
@ -528,17 +560,18 @@ class ClientRequest:
# keep alive not supported at all
return False
if self.version == HttpVersion10:
if self.headers.get(hdrs.CONNECTION) == 'keep-alive':
if self.headers.get(hdrs.CONNECTION) == "keep-alive":
return True
else: # no headers means we close for Http 1.0
return False
elif self.headers.get(hdrs.CONNECTION) == 'close':
elif self.headers.get(hdrs.CONNECTION) == "close":
return False
return True
async def write_bytes(self, writer: AbstractStreamWriter,
conn: 'Connection') -> None:
async def write_bytes(
self, writer: AbstractStreamWriter, conn: "Connection"
) -> None:
"""Support coroutines that yields bytes objects."""
# 100 response
if self._continue is not None:
@ -560,8 +593,8 @@ class ClientRequest:
await writer.write_eof()
except OSError as exc:
new_exc = ClientOSError(
exc.errno,
'Can not write request body for %s' % self.url)
exc.errno, "Can not write request body for %s" % self.url
)
new_exc.__context__ = exc
new_exc.__cause__ = exc
protocol.set_exception(new_exc)
@ -573,7 +606,7 @@ class ClientRequest:
finally:
self._writer = None
async def send(self, conn: 'Connection') -> 'ClientResponse':
async def send(self, conn: "Connection") -> "ClientResponse":
# Specify request target:
# - CONNECT request must send authority form URI
# - not CONNECT proxy must send absolute form URI
@ -582,20 +615,23 @@ class ClientRequest:
connect_host = self.url.raw_host
assert connect_host is not None
if helpers.is_ipv6_address(connect_host):
connect_host = '[{}]'.format(connect_host)
path = '{}:{}'.format(connect_host, self.url.port)
connect_host = f"[{connect_host}]"
path = f"{connect_host}:{self.url.port}"
elif self.proxy and not self.is_ssl():
path = str(self.url)
else:
path = self.url.raw_path
if self.url.raw_query_string:
path += '?' + self.url.raw_query_string
path += "?" + self.url.raw_query_string
protocol = conn.protocol
assert protocol is not None
writer = StreamWriter(
protocol, self.loop,
on_chunk_sent=self._on_chunk_request_sent
protocol,
self.loop,
on_chunk_sent=functools.partial(
self._on_chunk_request_sent, self.method, self.url
),
)
if self.compress:
@ -605,27 +641,30 @@ class ClientRequest:
writer.enable_chunking()
# set default content-type
if (self.method in self.POST_METHODS and
hdrs.CONTENT_TYPE not in self.skip_auto_headers and
hdrs.CONTENT_TYPE not in self.headers):
self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream'
if (
self.method in self.POST_METHODS
and hdrs.CONTENT_TYPE not in self.skip_auto_headers
and hdrs.CONTENT_TYPE not in self.headers
):
self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
# set the connection header
connection = self.headers.get(hdrs.CONNECTION)
if not connection:
if self.keep_alive():
if self.version == HttpVersion10:
connection = 'keep-alive'
connection = "keep-alive"
else:
if self.version == HttpVersion11:
connection = 'close'
connection = "close"
if connection is not None:
self.headers[hdrs.CONNECTION] = connection
# status + headers
status_line = '{0} {1} HTTP/{2[0]}.{2[1]}'.format(
self.method, path, self.version)
status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
self.method, path, self.version
)
await writer.write_headers(status_line, self.headers)
self._writer = self.loop.create_task(self.write_bytes(writer, conn))
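A worked example of the status line built just above (`version` is a (major, minor) tuple, so the format spec indexes into it):

    method, path, version = "GET", "/index.html?q=1", (1, 1)
    status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(method, path, version)
    assert status_line == "GET /index.html?q=1 HTTP/1.1"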
@ -633,12 +672,15 @@ class ClientRequest:
response_class = self.response_class
assert response_class is not None
self.response = response_class(
self.method, self.original_url,
writer=self._writer, continue100=self._continue, timer=self._timer,
self.method,
self.original_url,
writer=self._writer,
continue100=self._continue,
timer=self._timer,
request_info=self.request_info,
traces=self._traces,
loop=self.loop,
session=self._session
session=self._session,
)
return self.response
@ -655,17 +697,17 @@ class ClientRequest:
self._writer.cancel()
self._writer = None
async def _on_chunk_request_sent(self, chunk: bytes) -> None:
async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
for trace in self._traces:
await trace.send_request_chunk_sent(chunk)
await trace.send_request_chunk_sent(method, url, chunk)
class ClientResponse(HeadersMixin):
# from the Status-Line of the response
version = None # HTTP-Version
status = None # type: int # Status-Code
reason = None # Reason-Phrase
status = None # type: int # Status-Code
reason = None # Reason-Phrase
content = None # type: StreamReader # Payload stream
_headers = None # type: CIMultiDictProxy[str] # Response headers
@ -678,18 +720,23 @@ class ClientResponse(HeadersMixin):
_closed = True  # to allow __del__ for a not-properly-initialized response
_released = False
def __init__(self, method: str, url: URL, *,
writer: 'asyncio.Task[None]',
continue100: Optional['asyncio.Future[bool]'],
timer: BaseTimerContext,
request_info: RequestInfo,
traces: List['Trace'],
loop: asyncio.AbstractEventLoop,
session: 'ClientSession') -> None:
def __init__(
self,
method: str,
url: URL,
*,
writer: "asyncio.Task[None]",
continue100: Optional["asyncio.Future[bool]"],
timer: BaseTimerContext,
request_info: RequestInfo,
traces: List["Trace"],
loop: asyncio.AbstractEventLoop,
session: "ClientSession",
) -> None:
assert isinstance(url, URL)
self.method = method
self.cookies = SimpleCookie()
self.cookies = SimpleCookie() # type: SimpleCookie[str]
self._real_url = url
self._url = url.with_fragment(None)
@ -714,8 +761,7 @@ class ClientResponse(HeadersMixin):
@reify
def url_obj(self) -> URL:
warnings.warn(
"Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
return self._url
@reify
@ -728,7 +774,7 @@ class ClientResponse(HeadersMixin):
return self._url.host
@reify
def headers(self) -> 'CIMultiDictProxy[str]':
def headers(self) -> "CIMultiDictProxy[str]":
return self._headers
@reify
@ -749,7 +795,7 @@ class ClientResponse(HeadersMixin):
filename = multipart.content_disposition_filename(params)
return ContentDisposition(disposition_type, params, filename)
def __del__(self, _warnings: Any=warnings) -> None:
def __del__(self, _warnings: Any = warnings) -> None:
if self._closed:
return
@ -759,44 +805,44 @@ class ClientResponse(HeadersMixin):
if self._loop.get_debug():
if PY_36:
kwargs = {'source': self}
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn("Unclosed response {!r}".format(self),
ResourceWarning,
**kwargs)
context = {'client_response': self,
'message': 'Unclosed response'}
_warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
context = {"client_response": self, "message": "Unclosed response"}
if self._source_traceback:
context['source_traceback'] = self._source_traceback
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def __repr__(self) -> str:
out = io.StringIO()
ascii_encodable_url = str(self.url)
if self.reason:
ascii_encodable_reason = self.reason.encode('ascii',
'backslashreplace') \
.decode('ascii')
ascii_encodable_reason = self.reason.encode(
"ascii", "backslashreplace"
).decode("ascii")
else:
ascii_encodable_reason = self.reason
print('<ClientResponse({}) [{} {}]>'.format(
ascii_encodable_url, self.status, ascii_encodable_reason),
file=out)
print(
"<ClientResponse({}) [{} {}]>".format(
ascii_encodable_url, self.status, ascii_encodable_reason
),
file=out,
)
print(self.headers, file=out)
return out.getvalue()
@property
def connection(self) -> Optional['Connection']:
def connection(self) -> Optional["Connection"]:
return self._connection
@reify
def history(self) -> Tuple['ClientResponse', ...]:
def history(self) -> Tuple["ClientResponse", ...]:
"""A sequence of of responses, if redirects occurred."""
return self._history
@reify
def links(self) -> 'MultiDictProxy[MultiDictProxy[Union[str, URL]]]':
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
links_str = ", ".join(self.headers.getall("link", []))
if not links_str:
@ -815,10 +861,7 @@ class ClientResponse(HeadersMixin):
link = MultiDict() # type: MultiDict[Union[str, URL]]
for param in params:
match = re.match(
r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$",
param, re.M
)
match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
if match is None: # pragma: no cover
# the check exists to suppress mypy error
continue
@ -834,7 +877,7 @@ class ClientResponse(HeadersMixin):
return MultiDictProxy(links)
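A hand-run of the `links` param regex above against one `key="value"` pair, to show what the groups capture:

    import re

    param = ' rel="next" '
    match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
    assert match is not None
    key, value = match.group(1), match.group(3)
    assert (key, value) == ("rel", "next")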
async def start(self, connection: 'Connection') -> 'ClientResponse':
async def start(self, connection: "Connection") -> "ClientResponse":
"""Start response processing."""
self._closed = False
self._protocol = connection.protocol
@ -844,15 +887,17 @@ class ClientResponse(HeadersMixin):
while True:
# read response
try:
message, payload = await self._protocol.read() # type: ignore # noqa
message, payload = await self._protocol.read() # type: ignore
except http.HttpProcessingError as exc:
raise ClientResponseError(
self.request_info, self.history,
self.request_info,
self.history,
status=exc.code,
message=exc.message, headers=exc.headers) from exc
message=exc.message,
headers=exc.headers,
) from exc
if (message.code < 100 or
message.code > 199 or message.code == 101):
if message.code < 100 or message.code > 199 or message.code == 101:
break
if self._continue is not None:
@ -879,8 +924,7 @@ class ClientResponse(HeadersMixin):
try:
self.cookies.load(hdr)
except CookieError as exc:
client_logger.warning(
'Can not load response cookies: %s', exc)
client_logger.warning("Can not load response cookies: %s", exc)
return self
def _response_eof(self) -> None:
@ -890,8 +934,10 @@ class ClientResponse(HeadersMixin):
if self._connection is not None:
# websocket, protocol could be None because
# connection could be detached
if (self._connection.protocol is not None and
self._connection.protocol.upgraded):
if (
self._connection.protocol is not None
and self._connection.protocol.upgraded
):
return
self._connection.release()
@ -933,6 +979,19 @@ class ClientResponse(HeadersMixin):
self._cleanup_writer()
return noop()
@property
def ok(self) -> bool:
"""Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
This is **not** a check for ``200 OK`` but a check that the response
status is under 400.
"""
try:
self.raise_for_status()
except ClientResponseError:
return False
return True
def raise_for_status(self) -> None:
if 400 <= self.status:
# reason should always be not None for a started response
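The semantics of the new `ok` property in isolation: it is a non-raising mirror of raise_for_status(), i.e. `status < 400`, not `status == 200`:

    def ok(status: int) -> bool:
        # Same test raise_for_status() applies, without raising.
        return not (400 <= status)

    assert ok(200) and ok(304)           # 3xx still counts as "ok"
    assert not ok(404) and not ok(500)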
@ -943,7 +1002,8 @@ class ClientResponse(HeadersMixin):
self.history,
status=self.status,
message=self.reason,
headers=self.headers)
headers=self.headers,
)
def _cleanup_writer(self) -> None:
if self._writer is not None:
@ -954,8 +1014,7 @@ class ClientResponse(HeadersMixin):
def _notify_content(self) -> None:
content = self.content
if content and content.exception() is None:
content.set_exception(
ClientConnectionError('Connection closed'))
content.set_exception(ClientConnectionError("Connection closed"))
self._released = True
async def wait_for_close(self) -> None:
@ -972,38 +1031,46 @@ class ClientResponse(HeadersMixin):
try:
self._body = await self.content.read()
for trace in self._traces:
await trace.send_response_chunk_received(self._body)
await trace.send_response_chunk_received(
self.method, self.url, self._body
)
except BaseException:
self.close()
raise
elif self._released:
raise ClientConnectionError('Connection closed')
raise ClientConnectionError("Connection closed")
return self._body
def get_encoding(self) -> str:
ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
mimetype = helpers.parse_mimetype(ctype)
encoding = mimetype.parameters.get('charset')
encoding = mimetype.parameters.get("charset")
if encoding:
try:
codecs.lookup(encoding)
except LookupError:
encoding = None
if not encoding:
if mimetype.type == 'application' and mimetype.subtype == 'json':
if mimetype.type == "application" and (
mimetype.subtype == "json" or mimetype.subtype == "rdap"
):
# RFC 7159 states that the default encoding is UTF-8.
encoding = 'utf-8'
# RFC 7483 defines application/rdap+json
encoding = "utf-8"
elif self._body is None:
raise RuntimeError(
"Cannot guess the encoding of " "a not yet read body"
)
else:
encoding = chardet.detect(self._body)['encoding']
encoding = chardet.detect(self._body)["encoding"]
if not encoding:
encoding = 'utf-8'
encoding = "utf-8"
return encoding
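get_encoding()'s fallback order, condensed into a standalone sketch (looking only at the subtype is a simplification of this sketch; the code above also checks `mimetype.type == "application"`):

    import codecs

    def pick_encoding(charset, subtype, detected):
        # 1. declared charset, if the codec actually exists
        if charset:
            try:
                codecs.lookup(charset)
                return charset
            except LookupError:
                pass
        # 2. RFC 7159 / RFC 7483 default for (rdap+)json payloads
        if subtype in ("json", "rdap"):
            return "utf-8"
        # 3. chardet's guess over the body, else utf-8
        return detected or "utf-8"

    assert pick_encoding(None, "json", None) == "utf-8"
    assert pick_encoding("latin-1", "html", "ascii") == "latin-1"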
async def text(self,
encoding: Optional[str]=None, errors: str='strict') -> str:
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
"""Read response payload and decode."""
if self._body is None:
await self.read()
@ -1013,22 +1080,28 @@ class ClientResponse(HeadersMixin):
return self._body.decode(encoding, errors=errors) # type: ignore
async def json(self, *, encoding: str=None,
loads: JSONDecoder=DEFAULT_JSON_DECODER,
content_type: Optional[str]='application/json') -> Any:
async def json(
self,
*,
encoding: Optional[str] = None,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
content_type: Optional[str] = "application/json",
) -> Any:
"""Read and decodes JSON response."""
if self._body is None:
await self.read()
if content_type:
ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
if not _is_expected_content_type(ctype, content_type):
raise ContentTypeError(
self.request_info,
self.history,
message=('Attempt to decode JSON with '
'unexpected mimetype: %s' % ctype),
headers=self.headers)
message=(
"Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
),
headers=self.headers,
)
stripped = self._body.strip() # type: ignore
if not stripped:
@ -1039,14 +1112,16 @@ class ClientResponse(HeadersMixin):
return loads(stripped.decode(encoding))
async def __aenter__(self) -> 'ClientResponse':
async def __aenter__(self) -> "ClientResponse":
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
# similar to _RequestContextManager, we do not need to check
# for exceptions, response object can closes connection
# is state is broken
# for exceptions, response object can close connection
# if state is broken
self.release()
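A typical call site for the context-manager protocol above; `session` and `url` are hypothetical, and `content_type=None` disables the mimetype check shown earlier in json():

    async def fetch(session, url):
        # __aenter__/__aexit__ above guarantee release() on exit.
        async with session.get(url) as resp:
            resp.raise_for_status()
            return await resp.json(content_type=None)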

View File

@ -16,7 +16,7 @@ from .http import (
WSMsgType,
)
from .http_websocket import WebSocketWriter # WSMessage
from .streams import EofStream, FlowControlDataQueue # noqa
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
DEFAULT_JSON_DECODER,
DEFAULT_JSON_ENCODER,
@ -26,21 +26,22 @@ from .typedefs import (
class ClientWebSocketResponse:
def __init__(self,
reader: 'FlowControlDataQueue[WSMessage]',
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float]=None,
heartbeat: Optional[float]=None,
compress: int=0,
client_notakeover: bool=False) -> None:
def __init__(
self,
reader: "FlowControlDataQueue[WSMessage]",
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float] = None,
heartbeat: Optional[float] = None,
compress: int = 0,
client_notakeover: bool = False,
) -> None:
self._response = response
self._conn = response.connection
@ -81,7 +82,8 @@ class ClientWebSocketResponse:
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
@ -93,7 +95,8 @@ class ClientWebSocketResponse:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if not self._closed:
@ -122,7 +125,7 @@ class ClientWebSocketResponse:
def client_notakeover(self) -> bool:
return self._client_notakeover
def get_extra_info(self, name: str, default: Any=None) -> Any:
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""extra info from connection transport"""
conn = self._response.connection
if conn is None:
@ -135,31 +138,32 @@ class ClientWebSocketResponse:
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
await self._writer.ping(message)
async def pong(self, message: bytes=b'') -> None:
async def pong(self, message: bytes = b"") -> None:
await self._writer.pong(message)
async def send_str(self, data: str,
compress: Optional[int]=None) -> None:
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes,
compress: Optional[int]=None) -> None:
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(self, data: Any,
compress: Optional[int]=None,
*, dumps: JSONEncoder=DEFAULT_JSON_ENCODER) -> None:
async def send_json(
self,
data: Any,
compress: Optional[int] = None,
*,
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
) -> None:
await self.send_str(dumps(data), compress=compress)
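Usage sketch for the three send helpers, assuming `ws` is an already-connected ClientWebSocketResponse (hypothetical):

    async def announce(ws):
        await ws.send_str("hello")             # must be str
        await ws.send_bytes(b"\x00\x01")       # bytes/bytearray/memoryview
        await ws.send_json({"event": "ping"})  # send_str over dumps(...)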
async def close(self, *, code: int=1000, message: bytes=b'') -> bool:
async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
# we need to break `receive()` cycle first,
# `close()` may be called from different task
if self._waiting is not None and not self._closed:
@ -206,11 +210,10 @@ class ClientWebSocketResponse:
else:
return False
async def receive(self, timeout: Optional[float]=None) -> WSMessage:
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
return WS_CLOSED_MESSAGE
@ -222,8 +225,8 @@ class ClientWebSocketResponse:
self._waiting = self._loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout,
loop=self._loop):
timeout or self._receive_timeout, loop=self._loop
):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
@ -267,35 +270,32 @@ class ClientWebSocketResponse:
return msg
async def receive_str(self, *, timeout: Optional[float]=None) -> str:
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not str".format(msg.type,
msg.data))
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
return msg.data
async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes:
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return msg.data
async def receive_json(self,
*, loads: JSONDecoder=DEFAULT_JSON_DECODER,
timeout: Optional[float]=None) -> Any:
async def receive_json(
self,
*,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
timeout: Optional[float] = None,
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
def __aiter__(self) -> 'ClientWebSocketResponse':
def __aiter__(self) -> "ClientWebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
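Because __anext__ raises StopAsyncIteration on CLOSE/CLOSING/CLOSED, a plain async-for only ever sees data frames; a minimal consumer sketch (`ws` hypothetical):

    from aiohttp import WSMsgType

    async def pump(ws):
        async for msg in ws:                 # ends cleanly on close frames
            if msg.type == WSMsgType.TEXT:
                print("text:", msg.data)
            elif msg.type == WSMsgType.BINARY:
                print("binary:", len(msg.data))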

View File

@ -10,7 +10,7 @@ from http.cookies import SimpleCookie
from itertools import cycle, islice
from time import monotonic
from types import TracebackType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
@ -44,38 +44,31 @@ from .client_exceptions import (
)
from .client_proto import ResponseHandler
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
from .helpers import (
PY_36,
CeilTimeout,
get_running_loop,
is_ip_address,
noop2,
sentinel,
)
from .helpers import PY_36, CeilTimeout, get_running_loop, is_ip_address, noop, sentinel
from .http import RESPONSES
from .locks import EventResultOrError
from .resolver import DefaultResolver
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
__all__ = ('BaseConnector', 'TCPConnector', 'UnixConnector',
'NamedPipeConnector')
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
if TYPE_CHECKING: # pragma: no cover
from .client import ClientTimeout # noqa
from .client_reqrep import ConnectionKey # noqa
from .tracing import Trace # noqa
from .client import ClientTimeout
from .client_reqrep import ConnectionKey
from .tracing import Trace
class _DeprecationWaiter:
__slots__ = ('_awaitable', '_awaited')
__slots__ = ("_awaitable", "_awaited")
def __init__(self, awaitable: Awaitable[Any]) -> None:
self._awaitable = awaitable
@ -87,9 +80,11 @@ class _DeprecationWaiter:
def __del__(self) -> None:
if not self._awaited:
warnings.warn("Connector.close() is a coroutine, "
"please use await connector.close()",
DeprecationWarning)
warnings.warn(
"Connector.close() is a coroutine, "
"please use await connector.close()",
DeprecationWarning,
)
class Connection:
@ -97,10 +92,13 @@ class Connection:
_source_traceback = None
_transport = None
def __init__(self, connector: 'BaseConnector',
key: 'ConnectionKey',
protocol: ResponseHandler,
loop: asyncio.AbstractEventLoop) -> None:
def __init__(
self,
connector: "BaseConnector",
key: "ConnectionKey",
protocol: ResponseHandler,
loop: asyncio.AbstractEventLoop,
) -> None:
self._key = key
self._connector = connector
self._loop = loop
@ -111,34 +109,30 @@ class Connection:
self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __repr__(self) -> str:
return 'Connection<{}>'.format(self._key)
return f"Connection<{self._key}>"
def __del__(self, _warnings: Any=warnings) -> None:
def __del__(self, _warnings: Any = warnings) -> None:
if self._protocol is not None:
if PY_36:
kwargs = {'source': self}
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn('Unclosed connection {!r}'.format(self),
ResourceWarning,
**kwargs)
_warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
if self._loop.is_closed():
return
self._connector._release(
self._key, self._protocol, should_close=True)
self._connector._release(self._key, self._protocol, should_close=True)
context = {'client_connection': self,
'message': 'Unclosed connection'}
context = {"client_connection": self, "message": "Unclosed connection"}
if self._source_traceback is not None:
context['source_traceback'] = self._source_traceback
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
@property
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn("connector.loop property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"connector.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
@property
@ -166,8 +160,7 @@ class Connection:
self._notify_release()
if self._protocol is not None:
self._connector._release(
self._key, self._protocol, should_close=True)
self._connector._release(self._key, self._protocol, should_close=True)
self._protocol = None
def release(self) -> None:
@ -175,8 +168,8 @@ class Connection:
if self._protocol is not None:
self._connector._release(
self._key, self._protocol,
should_close=self._protocol.should_close)
self._key, self._protocol, should_close=self._protocol.should_close
)
self._protocol = None
@property
@ -210,18 +203,22 @@ class BaseConnector:
# abort transport after 2 seconds (cleanup broken connections)
_cleanup_closed_period = 2.0
def __init__(self, *,
keepalive_timeout: Union[object, None, float]=sentinel,
force_close: bool=False,
limit: int=100, limit_per_host: int=0,
enable_cleanup_closed: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(
self,
*,
keepalive_timeout: Union[object, None, float] = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
if force_close:
if keepalive_timeout is not None and \
keepalive_timeout is not sentinel:
raise ValueError('keepalive_timeout cannot '
'be set if force_close is True')
if keepalive_timeout is not None and keepalive_timeout is not sentinel:
raise ValueError(
"keepalive_timeout cannot " "be set if force_close is True"
)
else:
if keepalive_timeout is sentinel:
keepalive_timeout = 15.0
@ -232,11 +229,15 @@ class BaseConnector:
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
self._conns = {} # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] # noqa
self._conns = (
{}
) # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
self._limit = limit
self._limit_per_host = limit_per_host
self._acquired = set() # type: Set[ResponseHandler]
self._acquired_per_host = defaultdict(set) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]] # noqa
self._acquired_per_host = defaultdict(
set
) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
self._keepalive_timeout = cast(float, keepalive_timeout)
self._force_close = force_close
@ -246,7 +247,7 @@ class BaseConnector:
self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
self.cookies = SimpleCookie()
self.cookies = SimpleCookie() # type: SimpleCookie[str]
# start keep-alive connection cleanup task
self._cleanup_handle = None
@ -254,10 +255,10 @@ class BaseConnector:
# start cleanup closed transports task
self._cleanup_closed_handle = None
self._cleanup_closed_disabled = not enable_cleanup_closed
self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]] # noqa
self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]]
self._cleanup_closed()
def __del__(self, _warnings: Any=warnings) -> None:
def __del__(self, _warnings: Any = warnings) -> None:
if self._closed:
return
if not self._conns:
@ -268,36 +269,39 @@ class BaseConnector:
self._close()
if PY_36:
kwargs = {'source': self}
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn("Unclosed connector {!r}".format(self),
ResourceWarning,
**kwargs)
context = {'connector': self,
'connections': conns,
'message': 'Unclosed connector'}
_warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
context = {
"connector": self,
"connections": conns,
"message": "Unclosed connector",
}
if self._source_traceback is not None:
context['source_traceback'] = self._source_traceback
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def __enter__(self) -> 'BaseConnector':
warnings.warn('"witn Connector():" is deprecated, '
'use "async with Connector():" instead',
DeprecationWarning)
def __enter__(self) -> "BaseConnector":
warnings.warn(
'"witn Connector():" is deprecated, '
'use "async with Connector():" instead',
DeprecationWarning,
)
return self
def __exit__(self, *exc: Any) -> None:
self.close()
async def __aenter__(self) -> 'BaseConnector':
async def __aenter__(self) -> "BaseConnector":
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]]=None,
exc_value: Optional[BaseException]=None,
exc_traceback: Optional[TracebackType]=None
) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]] = None,
exc_value: Optional[BaseException] = None,
exc_traceback: Optional[TracebackType] = None,
) -> None:
await self.close()
@property
@ -329,6 +333,9 @@ class BaseConnector:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
# _cleanup_handle should be unset, otherwise _release() will not
# recreate it ever!
self._cleanup_handle = None
now = self._loop.time()
timeout = self._keepalive_timeout
@ -343,12 +350,15 @@ class BaseConnector:
if use_time - deadline < 0:
transport = proto.transport
proto.close()
if (key.is_ssl and
not self._cleanup_closed_disabled):
self._cleanup_closed_transports.append(
transport)
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
alive.append((proto, use_time))
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
if alive:
connections[key] = alive
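The expiry test in the loop above, worked through with concrete numbers:

    now, timeout = 100.0, 15.0
    deadline = now - timeout                     # 85.0
    for use_time, stays_alive in [(90.0, True), (80.0, False)]:
        # `use_time - deadline < 0` closes the connection, as in _cleanup()
        assert (use_time - deadline >= 0) is stays_alive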
@ -357,10 +367,12 @@ class BaseConnector:
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
self, '_cleanup', timeout, self._loop)
self, "_cleanup", timeout, self._loop
)
def _drop_acquired_per_host(self, key: 'ConnectionKey',
val: ResponseHandler) -> None:
def _drop_acquired_per_host(
self, key: "ConnectionKey", val: ResponseHandler
) -> None:
acquired_per_host = self._acquired_per_host
if key not in acquired_per_host:
return
@ -384,13 +396,13 @@ class BaseConnector:
if not self._cleanup_closed_disabled:
self._cleanup_closed_handle = helpers.weakref_handle(
self, '_cleanup_closed',
self._cleanup_closed_period, self._loop)
self, "_cleanup_closed", self._cleanup_closed_period, self._loop
)
def close(self) -> Awaitable[None]:
"""Close all opened transports."""
self._close()
return _DeprecationWaiter(noop2())
return _DeprecationWaiter(noop())
def _close(self) -> None:
if self._closed:
@ -437,7 +449,7 @@ class BaseConnector:
"""
return self._closed
def _available_connections(self, key: 'ConnectionKey') -> int:
def _available_connections(self, key: "ConnectionKey") -> int:
"""
Return number of available connections taking into account
the limit, limit_per_host and the connection key.
@ -451,8 +463,11 @@ class BaseConnector:
available = self._limit - len(self._acquired)
# check limit per host
if (self._limit_per_host and available > 0 and
key in self._acquired_per_host):
if (
self._limit_per_host
and available > 0
and key in self._acquired_per_host
):
acquired = self._acquired_per_host.get(key)
assert acquired is not None
available = self._limit_per_host - len(acquired)
@ -467,20 +482,20 @@ class BaseConnector:
return available
async def connect(self, req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout') -> Connection:
async def connect(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> Connection:
"""Get from pool or create new connection."""
key = req.connection_key
available = self._available_connections(key)
# Wait if there are no available connections.
if available <= 0:
# Wait if there are no available connections or if there are/were
# waiters (i.e. don't steal connection from a waiter about to wake up)
if available <= 0 or key in self._waiters:
fut = self._loop.create_future()
# This connection will now count towards the limit.
waiters = self._waiters[key]
waiters.append(fut)
self._waiters[key].append(fut)
if traces:
for trace in traces:
@ -489,21 +504,18 @@ class BaseConnector:
try:
await fut
except BaseException as e:
# remove a waiter even if it was cancelled, normally it's
# removed when it's notified
try:
waiters.remove(fut)
except ValueError: # fut may no longer be in list
pass
if key in self._waiters:
# remove a waiter even if it was cancelled, normally it's
# removed when it's notified
try:
self._waiters[key].remove(fut)
except ValueError: # fut may no longer be in list
pass
raise e
finally:
if not waiters:
try:
del self._waiters[key]
except KeyError:
# the key was evicted before.
pass
if key in self._waiters and not self._waiters[key]:
del self._waiters[key]
if traces:
for trace in traces:
@ -547,7 +559,7 @@ class BaseConnector:
self._acquired_per_host[key].add(proto)
return Connection(self, key, proto, self._loop)
def _get(self, key: 'ConnectionKey') -> Optional[ResponseHandler]:
def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
@ -568,6 +580,11 @@ class BaseConnector:
# The very last connection was reclaimed: drop the key
del self._conns[key]
return proto
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
# No more connections: drop the key
del self._conns[key]
@ -597,8 +614,7 @@ class BaseConnector:
waiter.set_result(None)
return
def _release_acquired(self, key: 'ConnectionKey',
proto: ResponseHandler) -> None:
def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
if self._closed:
# acquired connection is already released on connector closing
return
@ -613,8 +629,13 @@ class BaseConnector:
else:
self._release_waiter()
def _release(self, key: 'ConnectionKey', protocol: ResponseHandler,
*, should_close: bool=False) -> None:
def _release(
self,
key: "ConnectionKey",
protocol: ResponseHandler,
*,
should_close: bool = False,
) -> None:
if self._closed:
# acquired connection is already released on connector closing
return
@ -638,18 +659,20 @@ class BaseConnector:
if self._cleanup_handle is None:
self._cleanup_handle = helpers.weakref_handle(
self, '_cleanup', self._keepalive_timeout, self._loop)
self, "_cleanup", self._keepalive_timeout, self._loop
)
async def _create_connection(self, req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout') -> ResponseHandler:
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
raise NotImplementedError()
class _DNSCacheTable:
def __init__(self, ttl: Optional[float]=None) -> None:
self._addrs_rr = {} # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] # noqa
def __init__(self, ttl: Optional[float] = None) -> None:
self._addrs_rr = (
{}
) # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
self._timestamps = {} # type: Dict[Tuple[str, int], float]
self._ttl = ttl
@ -711,34 +734,44 @@ class TCPConnector(BaseConnector):
loop - Optional event loop.
"""
def __init__(self, *, verify_ssl: bool=True,
fingerprint: Optional[bytes]=None,
use_dns_cache: bool=True, ttl_dns_cache: int=10,
family: int=0,
ssl_context: Optional[SSLContext]=None,
ssl: Union[None, bool, Fingerprint, SSLContext]=None,
local_addr: Optional[Tuple[str, int]]=None,
resolver: Optional[AbstractResolver]=None,
keepalive_timeout: Union[None, float, object]=sentinel,
force_close: bool=False,
limit: int=100, limit_per_host: int=0,
enable_cleanup_closed: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None):
super().__init__(keepalive_timeout=keepalive_timeout,
force_close=force_close,
limit=limit, limit_per_host=limit_per_host,
enable_cleanup_closed=enable_cleanup_closed,
loop=loop)
def __init__(
self,
*,
verify_ssl: bool = True,
fingerprint: Optional[bytes] = None,
use_dns_cache: bool = True,
ttl_dns_cache: Optional[int] = 10,
family: int = 0,
ssl_context: Optional[SSLContext] = None,
ssl: Union[None, bool, Fingerprint, SSLContext] = None,
local_addr: Optional[Tuple[str, int]] = None,
resolver: Optional[AbstractResolver] = None,
keepalive_timeout: Union[None, float, object] = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
):
super().__init__(
keepalive_timeout=keepalive_timeout,
force_close=force_close,
limit=limit,
limit_per_host=limit_per_host,
enable_cleanup_closed=enable_cleanup_closed,
loop=loop,
)
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context,
fingerprint)
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
if resolver is None:
resolver = DefaultResolver(loop=self._loop)
self._resolver = resolver
self._use_dns_cache = use_dns_cache
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
self._throttle_dns_events = {} # type: Dict[Tuple[str, int], EventResultOrError] # noqa
self._throttle_dns_events = (
{}
) # type: Dict[Tuple[str, int], EventResultOrError]
self._family = family
self._local_addr = local_addr
@ -759,25 +792,31 @@ class TCPConnector(BaseConnector):
"""True if local DNS caching is enabled."""
return self._use_dns_cache
def clear_dns_cache(self,
host: Optional[str]=None,
port: Optional[int]=None) -> None:
def clear_dns_cache(
self, host: Optional[str] = None, port: Optional[int] = None
) -> None:
"""Remove specified host/port or clear all dns local cache."""
if host is not None and port is not None:
self._cached_hosts.remove((host, port))
elif host is not None or port is not None:
raise ValueError("either both host and port "
"or none of them are allowed")
raise ValueError("either both host and port " "or none of them are allowed")
else:
self._cached_hosts.clear()
async def _resolve_host(self,
host: str, port: int,
traces: Optional[List['Trace']]=None
) -> List[Dict[str, Any]]:
async def _resolve_host(
self, host: str, port: int, traces: Optional[List["Trace"]] = None
) -> List[Dict[str, Any]]:
if is_ip_address(host):
return [{'hostname': host, 'host': host, 'port': port,
'family': self._family, 'proto': 0, 'flags': 0}]
return [
{
"hostname": host,
"host": host,
"port": port,
"family": self._family,
"proto": 0,
"flags": 0,
}
]
if not self._use_dns_cache:
@ -785,8 +824,7 @@ class TCPConnector(BaseConnector):
for trace in traces:
await trace.send_dns_resolvehost_start(host)
res = (await self._resolver.resolve(
host, port, family=self._family))
res = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
@ -796,8 +834,7 @@ class TCPConnector(BaseConnector):
key = (host, port)
if (key in self._cached_hosts) and \
(not self._cached_hosts.expired(key)):
if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
# get result early, before any await (#4014)
result = self._cached_hosts.next_addrs(key)
@ -815,8 +852,7 @@ class TCPConnector(BaseConnector):
await event.wait()
else:
# update dict early, before any await (#4014)
self._throttle_dns_events[key] = \
EventResultOrError(self._loop)
self._throttle_dns_events[key] = EventResultOrError(self._loop)
if traces:
for trace in traces:
await trace.send_dns_cache_miss(host)
@ -826,8 +862,7 @@ class TCPConnector(BaseConnector):
for trace in traces:
await trace.send_dns_resolvehost_start(host)
addrs = await \
self._resolver.resolve(host, port, family=self._family)
addrs = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
await trace.send_dns_resolvehost_end(host)
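The _throttle_dns_events dance above coalesces concurrent lookups for the same (host, port): the first caller resolves, later callers wait on the event. A sketch with a plain asyncio.Event standing in for EventResultOrError (an assumption; the real class also propagates errors to the waiters):

    import asyncio

    events = {}

    async def resolve_once(key, do_resolve):
        if key in events:
            await events[key].wait()     # another task is already resolving
            return
        events[key] = asyncio.Event()
        try:
            await do_resolve()
        finally:
            events.pop(key).set()        # wake every waiter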
@ -844,19 +879,17 @@ class TCPConnector(BaseConnector):
return self._cached_hosts.next_addrs(key)
async def _create_connection(self, req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout') -> ResponseHandler:
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
"""Create connection.
Has same keyword arguments as BaseEventLoop.create_connection.
"""
if req.proxy:
_, proto = await self._create_proxy_connection(
req, traces, timeout)
_, proto = await self._create_proxy_connection(req, traces, timeout)
else:
_, proto = await self._create_direct_connection(
req, traces, timeout)
_, proto = await self._create_direct_connection(req, traces, timeout)
return proto
@ -873,16 +906,15 @@ class TCPConnector(BaseConnector):
sslcontext.options |= ssl.OP_NO_COMPRESSION
except AttributeError as attr_err:
warnings.warn(
'{!s}: The Python interpreter is compiled '
'against OpenSSL < 1.0.0. Ref: '
'https://docs.python.org/3/library/ssl.html'
'#ssl.OP_NO_COMPRESSION'.
format(attr_err),
"{!s}: The Python interpreter is compiled "
"against OpenSSL < 1.0.0. Ref: "
"https://docs.python.org/3/library/ssl.html"
"#ssl.OP_NO_COMPRESSION".format(attr_err),
)
sslcontext.set_default_verify_paths()
return sslcontext
def _get_ssl_context(self, req: 'ClientRequest') -> Optional[SSLContext]:
def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
"""Logic to get the correct SSL context
0. if req.ssl is false, return None
@ -898,7 +930,7 @@ class TCPConnector(BaseConnector):
"""
if req.is_ssl():
if ssl is None: # pragma: no cover
raise RuntimeError('SSL is not supported.')
raise RuntimeError("SSL is not supported.")
sslcontext = req.ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
@ -915,8 +947,7 @@ class TCPConnector(BaseConnector):
else:
return None
def _get_fingerprint(self,
req: 'ClientRequest') -> Optional['Fingerprint']:
def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
ret = req.ssl
if isinstance(ret, Fingerprint):
return ret
@ -926,45 +957,54 @@ class TCPConnector(BaseConnector):
return None
async def _wrap_create_connection(
self, *args: Any,
req: 'ClientRequest',
timeout: 'ClientTimeout',
client_error: Type[Exception]=ClientConnectorError,
**kwargs: Any) -> Tuple[asyncio.Transport, ResponseHandler]:
self,
*args: Any,
req: "ClientRequest",
timeout: "ClientTimeout",
client_error: Type[Exception] = ClientConnectorError,
**kwargs: Any,
) -> Tuple[asyncio.Transport, ResponseHandler]:
try:
with CeilTimeout(timeout.sock_connect):
return await self._loop.create_connection(*args, **kwargs) # type: ignore # noqa
except cert_errors as exc:
raise ClientConnectorCertificateError(
req.connection_key, exc) from exc
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
raise client_error(req.connection_key, exc) from exc
async def _create_direct_connection(
self,
req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout',
*,
client_error: Type[Exception]=ClientConnectorError
self,
req: "ClientRequest",
traces: List["Trace"],
timeout: "ClientTimeout",
*,
client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.Transport, ResponseHandler]:
sslcontext = self._get_ssl_context(req)
fingerprint = self._get_fingerprint(req)
host = req.url.raw_host
assert host is not None
port = req.port
assert port is not None
host_resolved = asyncio.ensure_future(
self._resolve_host(host, port, traces=traces), loop=self._loop
)
try:
# Cancelling this lookup should not cancel the underlying lookup
# or else the cancel event will get broadcast to all the waiters
# across all connections.
host = req.url.raw_host
assert host is not None
port = req.port
assert port is not None
hosts = await asyncio.shield(self._resolve_host(
host,
port,
traces=traces), loop=self._loop)
hosts = await asyncio.shield(host_resolved)
except asyncio.CancelledError:
def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
with suppress(Exception, asyncio.CancelledError):
fut.result()
host_resolved.add_done_callback(drop_exception)
raise
except OSError as exc:
# in case of proxy it is not ClientProxyConnectionError
# it is problem of resolving proxy ip itself
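The ensure_future/shield/drop_exception combination above is a reusable cancellation pattern: the shared lookup keeps running if one caller is cancelled, and the orphaned task's eventual exception is retrieved so it is never logged as unhandled. A generic sketch, not the exact helper used above:

    import asyncio

    async def guarded(coro):
        task = asyncio.ensure_future(coro)       # work shared by many callers
        try:
            return await asyncio.shield(task)    # cancelling *us* spares `task`
        except asyncio.CancelledError:
            # retrieve the result/exception later so it is never "unretrieved"
            task.add_done_callback(
                lambda fut: fut.cancelled() or fut.exception()
            )
            raise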
@ -973,17 +1013,24 @@ class TCPConnector(BaseConnector):
last_exc = None # type: Optional[Exception]
for hinfo in hosts:
host = hinfo['host']
port = hinfo['port']
host = hinfo["host"]
port = hinfo["port"]
try:
transp, proto = await self._wrap_create_connection(
self._factory, host, port, timeout=timeout,
ssl=sslcontext, family=hinfo['family'],
proto=hinfo['proto'], flags=hinfo['flags'],
server_hostname=hinfo['hostname'] if sslcontext else None,
self._factory,
host,
port,
timeout=timeout,
ssl=sslcontext,
family=hinfo["family"],
proto=hinfo["proto"],
flags=hinfo["flags"],
server_hostname=hinfo["hostname"] if sslcontext else None,
local_addr=self._local_addr,
req=req, client_error=client_error)
req=req,
client_error=client_error,
)
except ClientConnectorError as exc:
last_exc = exc
continue
@ -1004,10 +1051,7 @@ class TCPConnector(BaseConnector):
raise last_exc
async def _create_proxy_connection(
self,
req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout'
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.Transport, ResponseHandler]:
headers = {} # type: Dict[str, str]
if req.proxy_headers is not None:
@ -1017,15 +1061,18 @@ class TCPConnector(BaseConnector):
url = req.proxy
assert url is not None
proxy_req = ClientRequest(
hdrs.METH_GET, url,
hdrs.METH_GET,
url,
headers=headers,
auth=req.proxy_auth,
loop=self._loop,
ssl=req.ssl)
ssl=req.ssl,
)
# create connection to proxy server
transport, proto = await self._create_direct_connection(
proxy_req, [], timeout, client_error=ClientProxyConnectionError)
proxy_req, [], timeout, client_error=ClientProxyConnectionError
)
# Many HTTP proxies have buggy keepalive support. Let's not
# reuse connection but close it after processing every
@ -1052,10 +1099,9 @@ class TCPConnector(BaseConnector):
# asyncio handles this perfectly
proxy_req.method = hdrs.METH_CONNECT
proxy_req.url = req.url
key = attr.evolve(req.connection_key,
proxy=None,
proxy_auth=None,
proxy_headers_hash=None)
key = attr.evolve(
req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
)
conn = Connection(self, key, proto, self._loop)
proxy_resp = await proxy_req.send(conn)
try:
@ -1080,21 +1126,24 @@ class TCPConnector(BaseConnector):
resp.history,
status=resp.status,
message=message,
headers=resp.headers)
rawsock = transport.get_extra_info('socket', default=None)
headers=resp.headers,
)
rawsock = transport.get_extra_info("socket", default=None)
if rawsock is None:
raise RuntimeError(
"Transport does not expose socket instance")
raise RuntimeError("Transport does not expose socket instance")
# Duplicate the socket, so now we can close proxy transport
rawsock = rawsock.dup()
finally:
transport.close()
transport, proto = await self._wrap_create_connection(
self._factory, timeout=timeout,
ssl=sslcontext, sock=rawsock,
self._factory,
timeout=timeout,
ssl=sslcontext,
sock=rawsock,
server_hostname=req.host,
req=req)
req=req,
)
finally:
proxy_resp.close()
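For reference, the tunnel handshake above starts with a CONNECT request in authority form; the request line it produces (via the send() path shown earlier) looks like:

    host, port = "example.com", 443
    request_line = f"CONNECT {host}:{port} HTTP/1.1"
    assert request_line == "CONNECT example.com:443 HTTP/1.1"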
@ -1113,13 +1162,22 @@ class UnixConnector(BaseConnector):
loop - Optional event loop.
"""
def __init__(self, path: str, force_close: bool=False,
keepalive_timeout: Union[object, float, None]=sentinel,
limit: int=100, limit_per_host: int=0,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
super().__init__(force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit, limit_per_host=limit_per_host, loop=loop)
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: Union[object, float, None] = sentinel,
limit: int = 100,
limit_per_host: int = 0,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
loop=loop,
)
self._path = path
@property
@ -1127,13 +1185,14 @@ class UnixConnector(BaseConnector):
"""Path to unix socket."""
return self._path
async def _create_connection(self, req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout') -> ResponseHandler:
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
with CeilTimeout(timeout.sock_connect):
_, proto = await self._loop.create_unix_connection(
self._factory, self._path)
self._factory, self._path
)
except OSError as exc:
raise ClientConnectorError(req.connection_key, exc) from exc
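Usage sketch for UnixConnector; the socket path and endpoint are hypothetical (e.g. the Docker daemon socket):

    import aiohttp

    async def docker_version():
        connector = aiohttp.UnixConnector(path="/var/run/docker.sock")
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get("http://localhost/version") as resp:
                return await resp.json()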
@ -1155,16 +1214,26 @@ class NamedPipeConnector(BaseConnector):
loop - Optional event loop.
"""
def __init__(self, path: str, force_close: bool=False,
keepalive_timeout: Union[object, float, None]=sentinel,
limit: int=100, limit_per_host: int=0,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
super().__init__(force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit, limit_per_host=limit_per_host, loop=loop)
if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore # noqa
raise RuntimeError("Named Pipes only available in proactor "
"loop under windows")
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: Union[object, float, None] = sentinel,
limit: int = 100,
limit_per_host: int = 0,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
loop=loop,
)
if not isinstance(self._loop, asyncio.ProactorEventLoop): # type: ignore
raise RuntimeError(
"Named Pipes only available in proactor " "loop under windows"
)
self._path = path
@property
@ -1172,12 +1241,12 @@ class NamedPipeConnector(BaseConnector):
"""Path to the named pipe."""
return self._path
async def _create_connection(self, req: 'ClientRequest',
traces: List['Trace'],
timeout: 'ClientTimeout') -> ResponseHandler:
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
with CeilTimeout(timeout.sock_connect):
_, proto = await self._loop.create_pipe_connection( # type: ignore # noqa
_, proto = await self._loop.create_pipe_connection( # type: ignore
self._factory, self._path
)
# the drain is required so that the connection_made is called

View File

@ -5,7 +5,7 @@ import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie # noqa
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import ( # noqa
DefaultDict,
Dict,
@ -25,10 +25,10 @@ from .abc import AbstractCookieJar
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike
__all__ = ('CookieJar', 'DummyCookieJar')
__all__ = ("CookieJar", "DummyCookieJar")
CookieItem = Union[str, 'Morsel[str]']
CookieItem = Union[str, "Morsel[str]"]
class CookieJar(AbstractCookieJar):
@ -36,37 +36,55 @@ class CookieJar(AbstractCookieJar):
DATE_TOKENS_RE = re.compile(
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)")
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
)
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|"
"(aug)|(sep)|(oct)|(nov)|(dec)", re.I)
DATE_MONTH_RE = re.compile(
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
re.I,
)
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
MAX_TIME = datetime.datetime.max.replace(
tzinfo=datetime.timezone.utc)
MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
def __init__(self, *, unsafe: bool=False,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)
def __init__(
self,
*,
unsafe: bool = False,
quote_cookie: bool = True,
loop: Optional[asyncio.AbstractEventLoop] = None
) -> None:
super().__init__(loop=loop)
self._cookies = defaultdict(SimpleCookie) #type: DefaultDict[str, SimpleCookie] # noqa
self._cookies = defaultdict(
SimpleCookie
) # type: DefaultDict[str, SimpleCookie[str]]
self._host_only_cookies = set() # type: Set[Tuple[str, str]]
self._unsafe = unsafe
self._quote_cookie = quote_cookie
self._next_expiration = next_whole_second()
self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime] # noqa: E501
self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime]
# #4515: datetime.max may not be representable on 32-bit platforms
self._max_time = self.MAX_TIME
try:
self._max_time.timestamp()
except OverflowError:
self._max_time = self.MAX_32BIT_TIME
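The try/except added above is a runtime probe for the #4515 fix: on platforms where datetime.max has no representable Unix timestamp, the jar falls back to the last 32-bit epoch second. The same probe as a standalone sketch:

import datetime

max_time = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
try:
    max_time.timestamp()  # overflows where time_t is 32-bit
except OverflowError:
    # 2**31 - 1 seconds after the epoch, i.e. 2038-01-19.
    max_time = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)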
def save(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='wb') as f:
with file_path.open(mode="wb") as f:
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
def load(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode='rb') as f:
with file_path.open(mode="rb") as f:
self._cookies = pickle.load(f)
def clear(self) -> None:
@ -75,7 +93,7 @@ class CookieJar(AbstractCookieJar):
self._next_expiration = next_whole_second()
self._expirations.clear()
def __iter__(self) -> 'Iterator[Morsel[str]]':
def __iter__(self) -> "Iterator[Morsel[str]]":
self._do_expiration()
for val in self._cookies.values():
yield from val.values()
@ -89,7 +107,7 @@ class CookieJar(AbstractCookieJar):
return
if not self._expirations:
return
next_expiration = self.MAX_TIME
next_expiration = self._max_time
to_del = []
cookies = self._cookies
expirations = self._expirations
@ -104,19 +122,17 @@ class CookieJar(AbstractCookieJar):
del expirations[key]
try:
self._next_expiration = (next_expiration.replace(microsecond=0) +
datetime.timedelta(seconds=1))
self._next_expiration = next_expiration.replace(
microsecond=0
) + datetime.timedelta(seconds=1)
except OverflowError:
self._next_expiration = self.MAX_TIME
self._next_expiration = self._max_time
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str
) -> None:
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
self._next_expiration = min(self._next_expiration, when)
self._expirations[(domain, name)] = when
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
hostname = response_url.raw_host
@ -125,18 +141,18 @@ class CookieJar(AbstractCookieJar):
return
if isinstance(cookies, Mapping):
cookies = cookies.items() # type: ignore
cookies = cookies.items()
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp = SimpleCookie()
tmp = SimpleCookie() # type: SimpleCookie[str]
tmp[name] = cookie # type: ignore
cookie = tmp[name]
domain = cookie["domain"]
# ignore domains with trailing dots
if domain.endswith('.'):
if domain.endswith("."):
domain = ""
del cookie["domain"]
@ -163,7 +179,7 @@ class CookieJar(AbstractCookieJar):
path = "/"
else:
# Cut everything from the last slash to the end
path = "/" + path[1:path.rfind("/")]
path = "/" + path[1 : path.rfind("/")]
cookie["path"] = path
max_age = cookie["max-age"]
@ -171,13 +187,12 @@ class CookieJar(AbstractCookieJar):
try:
delta_seconds = int(max_age)
try:
max_age_expiration = (
datetime.datetime.now(datetime.timezone.utc) +
datetime.timedelta(seconds=delta_seconds))
max_age_expiration = datetime.datetime.now(
datetime.timezone.utc
) + datetime.timedelta(seconds=delta_seconds)
except OverflowError:
max_age_expiration = self.MAX_TIME
self._expire_cookie(max_age_expiration,
domain, name)
max_age_expiration = self._max_time
self._expire_cookie(max_age_expiration, domain, name)
except ValueError:
cookie["max-age"] = ""
@ -186,8 +201,7 @@ class CookieJar(AbstractCookieJar):
if expires:
expire_time = self._parse_date(expires)
if expire_time:
self._expire_cookie(expire_time,
domain, name)
self._expire_cookie(expire_time, domain, name)
else:
cookie["expires"] = ""
@ -195,11 +209,15 @@ class CookieJar(AbstractCookieJar):
self._do_expiration()
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]':
def filter_cookies(
self, request_url: URL = URL()
) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
"""Returns this jar's cookies filtered by their attributes."""
self._do_expiration()
request_url = URL(request_url)
filtered = SimpleCookie()
filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
SimpleCookie() if self._quote_cookie else BaseCookie()
)
hostname = request_url.raw_host or ""
is_not_secure = request_url.scheme not in ("https", "wss")
@ -229,7 +247,7 @@ class CookieJar(AbstractCookieJar):
# It's critical we use the Morsel so the coded_value
# (based on cookie version) is preserved
mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel()))
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
filtered[name] = mrsl_val
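The new quote_cookie flag decides which container filter_cookies builds on: SimpleCookie re-quotes values on output, while BaseCookie passes them through verbatim. A hedged usage sketch:

import aiohttp

async def make_session() -> aiohttp.ClientSession:
    # With quote_cookie=False, cookie values are sent exactly as stored
    # instead of being re-quoted by SimpleCookie.
    jar = aiohttp.CookieJar(quote_cookie=False)
    return aiohttp.ClientSession(cookie_jar=jar)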
@ -244,7 +262,7 @@ class CookieJar(AbstractCookieJar):
if not hostname.endswith(domain):
return False
non_matching = hostname[:-len(domain)]
non_matching = hostname[: -len(domain)]
if not non_matching.endswith("."):
return False
@ -266,7 +284,7 @@ class CookieJar(AbstractCookieJar):
if cookie_path.endswith("/"):
return True
non_matching = req_path[len(cookie_path):]
non_matching = req_path[len(cookie_path) :]
return non_matching.startswith("/")
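A worked example of the path rule above: the cookie path must be a string prefix of the request path, and the remainder must begin at a "/" boundary.

cookie_path = "/docs"
req_path = "/docs/api"

non_matching = req_path[len(cookie_path):]   # "/api"
assert non_matching.startswith("/")          # so "/docs" matches "/docs/api"

# "/doc" is a string prefix of "/docsearch" but not a path match:
assert not "/docsearch"[len("/doc"):].startswith("/")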
@ -294,8 +312,7 @@ class CookieJar(AbstractCookieJar):
time_match = cls.DATE_HMS_TIME_RE.match(token)
if time_match:
found_time = True
hour, minute, second = [
int(s) for s in time_match.groups()]
hour, minute, second = [int(s) for s in time_match.groups()]
continue
if not found_day:
@ -333,9 +350,9 @@ class CookieJar(AbstractCookieJar):
if year < 1601 or hour > 23 or minute > 59 or second > 59:
return None
return datetime.datetime(year, month, day,
hour, minute, second,
tzinfo=datetime.timezone.utc)
return datetime.datetime(
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
)
class DummyCookieJar(AbstractCookieJar):
@ -345,11 +362,10 @@ class DummyCookieJar(AbstractCookieJar):
"""
def __init__(self, *,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
super().__init__(loop=loop)
def __iter__(self) -> 'Iterator[Morsel[str]]':
def __iter__(self) -> "Iterator[Morsel[str]]":
while False:
yield None
@ -359,10 +375,8 @@ class DummyCookieJar(AbstractCookieJar):
def clear(self) -> None:
pass
def update_cookies(self,
cookies: LooseCookies,
response_url: URL=URL()) -> None:
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
pass
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
return SimpleCookie()

View File

@ -1,5 +1,5 @@
import io
from typing import Any, Iterable, List, Optional # noqa
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode
from multidict import MultiDict, MultiDictProxy
@ -8,20 +8,23 @@ from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload
__all__ = ('FormData',)
__all__ = ("FormData",)
class FormData:
"""Helper class for multipart/form-data and
application/x-www-form-urlencoded body generation."""
def __init__(self, fields:
Iterable[Any]=(),
quote_fields: bool=True,
charset: Optional[str]=None) -> None:
self._writer = multipart.MultipartWriter('form-data')
def __init__(
self,
fields: Iterable[Any] = (),
quote_fields: bool = True,
charset: Optional[str] = None,
) -> None:
self._writer = multipart.MultipartWriter("form-data")
self._fields = [] # type: List[Any]
self._is_multipart = False
self._is_processed = False
self._quote_fields = quote_fields
self._charset = charset
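For context, a minimal sketch of driving FormData (file name and URL are placeholders): string-only fields are sent application/x-www-form-urlencoded, and adding an io.IOBase value flips the body to multipart/form-data.

import aiohttp

async def upload(session: aiohttp.ClientSession) -> None:
    form = aiohttp.FormData()
    form.add_field("user", "alice")           # still urlencoded
    form.add_field(
        "report",
        open("report.csv", "rb"),             # file object: now multipart
        filename="report.csv",
        content_type="text/csv",
    )
    async with session.post("http://example.com/upload", data=form) as resp:
        resp.raise_for_status()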
@ -35,10 +38,15 @@ class FormData:
def is_multipart(self) -> bool:
return self._is_multipart
def add_field(self, name: str, value: Any, *,
content_type: Optional[str]=None,
filename: Optional[str]=None,
content_transfer_encoding: Optional[str]=None) -> None:
def add_field(
self,
name: str,
value: Any,
*,
content_type: Optional[str] = None,
filename: Optional[str] = None,
content_transfer_encoding: Optional[str] = None
) -> None:
if isinstance(value, io.IOBase):
self._is_multipart = True
@ -46,27 +54,31 @@ class FormData:
if filename is None and content_transfer_encoding is None:
filename = name
type_options = MultiDict({'name': name})
type_options = MultiDict({"name": name}) # type: MultiDict[str]
if filename is not None and not isinstance(filename, str):
raise TypeError('filename must be an instance of str. '
'Got: %s' % filename)
raise TypeError(
"filename must be an instance of str. " "Got: %s" % filename
)
if filename is None and isinstance(value, io.IOBase):
filename = guess_filename(value, name)
if filename is not None:
type_options['filename'] = filename
type_options["filename"] = filename
self._is_multipart = True
headers = {}
if content_type is not None:
if not isinstance(content_type, str):
raise TypeError('content_type must be an instance of str. '
'Got: %s' % content_type)
raise TypeError(
"content_type must be an instance of str. " "Got: %s" % content_type
)
headers[hdrs.CONTENT_TYPE] = content_type
self._is_multipart = True
if content_transfer_encoding is not None:
if not isinstance(content_transfer_encoding, str):
raise TypeError('content_transfer_encoding must be an instance'
' of str. Got: %s' % content_transfer_encoding)
raise TypeError(
"content_transfer_encoding must be an instance"
" of str. Got: %s" % content_transfer_encoding
)
headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
self._is_multipart = True
@ -79,7 +91,7 @@ class FormData:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, 'unknown')
k = guess_filename(rec, "unknown")
self.add_field(k, rec) # type: ignore
elif isinstance(rec, (MultiDictProxy, MultiDict)):
@ -90,49 +102,56 @@ class FormData:
self.add_field(k, fp) # type: ignore
else:
raise TypeError('Only io.IOBase, multidict and (name, file) '
'pairs allowed, use .add_field() for passing '
'more complex parameters, got {!r}'
.format(rec))
raise TypeError(
"Only io.IOBase, multidict and (name, file) "
"pairs allowed, use .add_field() for passing "
"more complex parameters, got {!r}".format(rec)
)
def _gen_form_urlencoded(self) -> payload.BytesPayload:
# form data (x-www-form-urlencoded)
data = []
for type_options, _, value in self._fields:
data.append((type_options['name'], value))
data.append((type_options["name"], value))
charset = self._charset if self._charset is not None else 'utf-8'
charset = self._charset if self._charset is not None else "utf-8"
if charset == 'utf-8':
content_type = 'application/x-www-form-urlencoded'
if charset == "utf-8":
content_type = "application/x-www-form-urlencoded"
else:
content_type = ('application/x-www-form-urlencoded; '
'charset=%s' % charset)
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
return payload.BytesPayload(
urlencode(data, doseq=True, encoding=charset).encode(),
content_type=content_type)
content_type=content_type,
)
def _gen_form_data(self) -> multipart.MultipartWriter:
"""Encode a list of fields using the multipart/form-data MIME format"""
if self._is_processed:
raise RuntimeError("Form data has been processed already")
for dispparams, headers, value in self._fields:
try:
if hdrs.CONTENT_TYPE in headers:
part = payload.get_payload(
value, content_type=headers[hdrs.CONTENT_TYPE],
headers=headers, encoding=self._charset)
value,
content_type=headers[hdrs.CONTENT_TYPE],
headers=headers,
encoding=self._charset,
)
else:
part = payload.get_payload(
value, headers=headers, encoding=self._charset)
value, headers=headers, encoding=self._charset
)
except Exception as exc:
raise TypeError(
'Can not serialize value type: %r\n '
'headers: %r\n value: %r' % (
type(value), headers, value)) from exc
"Can not serialize value type: %r\n "
"headers: %r\n value: %r" % (type(value), headers, value)
) from exc
if dispparams:
part.set_content_disposition(
'form-data', quote_fields=self._quote_fields, **dispparams
"form-data", quote_fields=self._quote_fields, **dispparams
)
# FIXME cgi.FieldStorage doesn't like body parts with
# Content-Length which were sent via chunked transfer encoding
@ -141,6 +160,7 @@ class FormData:
self._writer.append_payload(part)
self._is_processed = True
return self._writer
def __call__(self) -> Payload:

View File

@ -7,7 +7,7 @@ from .helpers import NO_EXTENSIONS
@total_ordering
class FrozenList(MutableSequence):
__slots__ = ('_frozen', '_items')
__slots__ = ("_frozen", "_items")
def __init__(self, items=None):
self._frozen = False
@ -58,14 +58,14 @@ class FrozenList(MutableSequence):
self._items.insert(pos, item)
def __repr__(self):
return '<FrozenList(frozen={}, {!r})>'.format(self._frozen,
self._items)
return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"
PyFrozenList = FrozenList
try:
from aiohttp._frozenlist import FrozenList as CFrozenList # type: ignore
if not NO_EXTENSIONS:
FrozenList = CFrozenList # type: ignore
except ImportError: # pragma: no cover

View File

@ -10,54 +10,37 @@ from typing import (
overload,
)
_T = TypeVar('_T')
_T = TypeVar("_T")
_Arg = Union[List[_T], Iterable[_T]]
class FrozenList(MutableSequence[_T], Generic[_T]):
def __init__(self, items: Optional[_Arg[_T]]=...) -> None: ...
def __init__(self, items: Optional[_Arg[_T]] = ...) -> None: ...
@property
def frozen(self) -> bool: ...
def freeze(self) -> None: ...
@overload
def __getitem__(self, i: int) -> _T: ...
@overload
def __getitem__(self, s: slice) -> FrozenList[_T]: ...
@overload
def __setitem__(self, i: int, o: _T) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
@overload
def __delitem__(self, i: int) -> None: ...
@overload
def __delitem__(self, i: slice) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __reversed__(self) -> Iterator[_T]: ...
def __eq__(self, other: object) -> bool: ...
def __le__(self, other: FrozenList[_T]) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: FrozenList[_T]) -> bool: ...
def __ge__(self, other: FrozenList[_T]) -> bool: ...
def __gt__(self, other: FrozenList[_T]) -> bool: ...
def insert(self, pos: int, item: _T) -> None: ...
def __repr__(self) -> str: ...
# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList

View File

@ -5,96 +5,104 @@
from multidict import istr
METH_ANY = '*'
METH_CONNECT = 'CONNECT'
METH_HEAD = 'HEAD'
METH_GET = 'GET'
METH_DELETE = 'DELETE'
METH_OPTIONS = 'OPTIONS'
METH_PATCH = 'PATCH'
METH_POST = 'POST'
METH_PUT = 'PUT'
METH_TRACE = 'TRACE'
METH_ANY = "*"
METH_CONNECT = "CONNECT"
METH_HEAD = "HEAD"
METH_GET = "GET"
METH_DELETE = "DELETE"
METH_OPTIONS = "OPTIONS"
METH_PATCH = "PATCH"
METH_POST = "POST"
METH_PUT = "PUT"
METH_TRACE = "TRACE"
METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE,
METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE}
METH_ALL = {
METH_CONNECT,
METH_HEAD,
METH_GET,
METH_DELETE,
METH_OPTIONS,
METH_PATCH,
METH_POST,
METH_PUT,
METH_TRACE,
}
ACCEPT = istr('Accept')
ACCEPT_CHARSET = istr('Accept-Charset')
ACCEPT_ENCODING = istr('Accept-Encoding')
ACCEPT_LANGUAGE = istr('Accept-Language')
ACCEPT_RANGES = istr('Accept-Ranges')
ACCESS_CONTROL_MAX_AGE = istr('Access-Control-Max-Age')
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('Access-Control-Allow-Credentials')
ACCESS_CONTROL_ALLOW_HEADERS = istr('Access-Control-Allow-Headers')
ACCESS_CONTROL_ALLOW_METHODS = istr('Access-Control-Allow-Methods')
ACCESS_CONTROL_ALLOW_ORIGIN = istr('Access-Control-Allow-Origin')
ACCESS_CONTROL_EXPOSE_HEADERS = istr('Access-Control-Expose-Headers')
ACCESS_CONTROL_REQUEST_HEADERS = istr('Access-Control-Request-Headers')
ACCESS_CONTROL_REQUEST_METHOD = istr('Access-Control-Request-Method')
AGE = istr('Age')
ALLOW = istr('Allow')
AUTHORIZATION = istr('Authorization')
CACHE_CONTROL = istr('Cache-Control')
CONNECTION = istr('Connection')
CONTENT_DISPOSITION = istr('Content-Disposition')
CONTENT_ENCODING = istr('Content-Encoding')
CONTENT_LANGUAGE = istr('Content-Language')
CONTENT_LENGTH = istr('Content-Length')
CONTENT_LOCATION = istr('Content-Location')
CONTENT_MD5 = istr('Content-MD5')
CONTENT_RANGE = istr('Content-Range')
CONTENT_TRANSFER_ENCODING = istr('Content-Transfer-Encoding')
CONTENT_TYPE = istr('Content-Type')
COOKIE = istr('Cookie')
DATE = istr('Date')
DESTINATION = istr('Destination')
DIGEST = istr('Digest')
ETAG = istr('Etag')
EXPECT = istr('Expect')
EXPIRES = istr('Expires')
FORWARDED = istr('Forwarded')
FROM = istr('From')
HOST = istr('Host')
IF_MATCH = istr('If-Match')
IF_MODIFIED_SINCE = istr('If-Modified-Since')
IF_NONE_MATCH = istr('If-None-Match')
IF_RANGE = istr('If-Range')
IF_UNMODIFIED_SINCE = istr('If-Unmodified-Since')
KEEP_ALIVE = istr('Keep-Alive')
LAST_EVENT_ID = istr('Last-Event-ID')
LAST_MODIFIED = istr('Last-Modified')
LINK = istr('Link')
LOCATION = istr('Location')
MAX_FORWARDS = istr('Max-Forwards')
ORIGIN = istr('Origin')
PRAGMA = istr('Pragma')
PROXY_AUTHENTICATE = istr('Proxy-Authenticate')
PROXY_AUTHORIZATION = istr('Proxy-Authorization')
RANGE = istr('Range')
REFERER = istr('Referer')
RETRY_AFTER = istr('Retry-After')
SEC_WEBSOCKET_ACCEPT = istr('Sec-WebSocket-Accept')
SEC_WEBSOCKET_VERSION = istr('Sec-WebSocket-Version')
SEC_WEBSOCKET_PROTOCOL = istr('Sec-WebSocket-Protocol')
SEC_WEBSOCKET_EXTENSIONS = istr('Sec-WebSocket-Extensions')
SEC_WEBSOCKET_KEY = istr('Sec-WebSocket-Key')
SEC_WEBSOCKET_KEY1 = istr('Sec-WebSocket-Key1')
SERVER = istr('Server')
SET_COOKIE = istr('Set-Cookie')
TE = istr('TE')
TRAILER = istr('Trailer')
TRANSFER_ENCODING = istr('Transfer-Encoding')
UPGRADE = istr('Upgrade')
WEBSOCKET = istr('WebSocket')
URI = istr('URI')
USER_AGENT = istr('User-Agent')
VARY = istr('Vary')
VIA = istr('Via')
WANT_DIGEST = istr('Want-Digest')
WARNING = istr('Warning')
WWW_AUTHENTICATE = istr('WWW-Authenticate')
X_FORWARDED_FOR = istr('X-Forwarded-For')
X_FORWARDED_HOST = istr('X-Forwarded-Host')
X_FORWARDED_PROTO = istr('X-Forwarded-Proto')
ACCEPT = istr("Accept")
ACCEPT_CHARSET = istr("Accept-Charset")
ACCEPT_ENCODING = istr("Accept-Encoding")
ACCEPT_LANGUAGE = istr("Accept-Language")
ACCEPT_RANGES = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD = istr("Access-Control-Request-Method")
AGE = istr("Age")
ALLOW = istr("Allow")
AUTHORIZATION = istr("Authorization")
CACHE_CONTROL = istr("Cache-Control")
CONNECTION = istr("Connection")
CONTENT_DISPOSITION = istr("Content-Disposition")
CONTENT_ENCODING = istr("Content-Encoding")
CONTENT_LANGUAGE = istr("Content-Language")
CONTENT_LENGTH = istr("Content-Length")
CONTENT_LOCATION = istr("Content-Location")
CONTENT_MD5 = istr("Content-MD5")
CONTENT_RANGE = istr("Content-Range")
CONTENT_TRANSFER_ENCODING = istr("Content-Transfer-Encoding")
CONTENT_TYPE = istr("Content-Type")
COOKIE = istr("Cookie")
DATE = istr("Date")
DESTINATION = istr("Destination")
DIGEST = istr("Digest")
ETAG = istr("Etag")
EXPECT = istr("Expect")
EXPIRES = istr("Expires")
FORWARDED = istr("Forwarded")
FROM = istr("From")
HOST = istr("Host")
IF_MATCH = istr("If-Match")
IF_MODIFIED_SINCE = istr("If-Modified-Since")
IF_NONE_MATCH = istr("If-None-Match")
IF_RANGE = istr("If-Range")
IF_UNMODIFIED_SINCE = istr("If-Unmodified-Since")
KEEP_ALIVE = istr("Keep-Alive")
LAST_EVENT_ID = istr("Last-Event-ID")
LAST_MODIFIED = istr("Last-Modified")
LINK = istr("Link")
LOCATION = istr("Location")
MAX_FORWARDS = istr("Max-Forwards")
ORIGIN = istr("Origin")
PRAGMA = istr("Pragma")
PROXY_AUTHENTICATE = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION = istr("Proxy-Authorization")
RANGE = istr("Range")
REFERER = istr("Referer")
RETRY_AFTER = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1 = istr("Sec-WebSocket-Key1")
SERVER = istr("Server")
SET_COOKIE = istr("Set-Cookie")
TE = istr("TE")
TRAILER = istr("Trailer")
TRANSFER_ENCODING = istr("Transfer-Encoding")
UPGRADE = istr("Upgrade")
URI = istr("URI")
USER_AGENT = istr("User-Agent")
VARY = istr("Vary")
VIA = istr("Via")
WANT_DIGEST = istr("Want-Digest")
WARNING = istr("Warning")
WWW_AUTHENTICATE = istr("WWW-Authenticate")
X_FORWARDED_FOR = istr("X-Forwarded-For")
X_FORWARDED_HOST = istr("X-Forwarded-Host")
X_FORWARDED_PROTO = istr("X-Forwarded-Proto")

View File

@ -20,10 +20,12 @@ from contextlib import suppress
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import ( # noqa
from typing import (
Any,
Callable,
Dict,
Generator,
Generic,
Iterable,
Iterator,
List,
@ -43,13 +45,14 @@ from urllib.request import getproxies
import async_timeout
import attr
from multidict import MultiDict, MultiDictProxy
from typing_extensions import Protocol
from yarl import URL
from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike # noqa
__all__ = ('BasicAuth', 'ChainMapProxy')
__all__ = ("BasicAuth", "ChainMapProxy")
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
@ -57,6 +60,7 @@ PY_38 = sys.version_info >= (3, 8)
if not PY_37:
import idna_ssl
idna_ssl.patch_match_hostname()
try:
@ -66,116 +70,122 @@ except ImportError:
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None
) -> Set['asyncio.Task[Any]']:
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Set["asyncio.Task[Any]"]:
tasks = list(asyncio.Task.all_tasks(loop))
return {t for t in tasks if not t.done()}
if PY_37:
all_tasks = getattr(asyncio, 'all_tasks') # noqa
all_tasks = getattr(asyncio, "all_tasks")
_T = TypeVar('_T')
_T = TypeVar("_T")
_S = TypeVar("_S")
sentinel = object() # type: Any
NO_EXTENSIONS = bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')) # type: bool
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = (getattr(sys.flags, 'dev_mode', False) or
(not sys.flags.ignore_environment and
bool(os.environ.get('PYTHONASYNCIODEBUG')))) # type: bool
DEBUG = getattr(sys.flags, "dev_mode", False) or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
) # type: bool
CHAR = set(chr(i) for i in range(0, 128))
CTL = set(chr(i) for i in range(0, 32)) | {chr(127), }
SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']',
'?', '=', '{', '}', ' ', chr(9)}
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
chr(127),
}
SEPARATORS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
" ",
chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
coroutines = asyncio.coroutines
old_debug = coroutines._DEBUG # type: ignore
# prevent "coroutine noop was never awaited" warning.
coroutines._DEBUG = False # type: ignore
class noop:
def __await__(self) -> Generator[None, None, None]:
yield
@asyncio.coroutine
def noop(*args, **kwargs): # type: ignore
return # type: ignore
async def noop2(*args: Any, **kwargs: Any) -> None:
return
coroutines._DEBUG = old_debug # type: ignore
class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])):
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
"""Http basic authentication helper."""
def __new__(cls, login: str,
password: str='',
encoding: str='latin1') -> 'BasicAuth':
def __new__(
cls, login: str, password: str = "", encoding: str = "latin1"
) -> "BasicAuth":
if login is None:
raise ValueError('None is not allowed as login value')
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError('None is not allowed as password value')
raise ValueError("None is not allowed as password value")
if ':' in login:
raise ValueError(
'A ":" is not allowed in login (RFC 1945#section-11.1)')
if ":" in login:
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth':
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(' ', 1)
auth_type, encoded_credentials = auth_header.split(" ", 1)
except ValueError:
raise ValueError('Could not parse authorization header.')
raise ValueError("Could not parse authorization header.")
if auth_type.lower() != 'basic':
raise ValueError('Unknown authorization method %s' % auth_type)
if auth_type.lower() != "basic":
raise ValueError("Unknown authorization method %s" % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode('ascii'), validate=True
encoded_credentials.encode("ascii"), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError('Invalid base64 encoding.')
raise ValueError("Invalid base64 encoding.")
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(':', 1)
username, password = decoded.split(":", 1)
except ValueError:
raise ValueError('Invalid credentials.')
raise ValueError("Invalid credentials.")
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL,
*, encoding: str='latin1') -> Optional['BasicAuth']:
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
if url.user is None:
return None
return cls(url.user, url.password or '', encoding=encoding)
return cls(url.user, url.password or "", encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding)
return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
creds = (f"{self.login}:{self.password}").encode(self.encoding)
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
@ -192,7 +202,7 @@ def netrc_from_env() -> Optional[netrc.netrc]:
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get('NETRC')
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
@ -201,44 +211,46 @@ def netrc_from_env() -> Optional[netrc.netrc]:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug('Could not resolve home directory when '
'trying to look for .netrc file: %s', e)
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / (
'_netrc' if platform.system() == 'Windows' else '.netrc')
"_netrc" if platform.system() == "Windows" else ".netrc"
)
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning('Could not parse .netrc file: %s', e)
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_path.is_file():
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning('Could not read .netrc file: %s', e)
client_logger.warning("Could not read .netrc file: %s", e)
return None
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
proxy = attr.ib(type=URL)
proxy_auth = attr.ib(type=Optional[BasicAuth])
proxy: URL
proxy_auth: Optional[BasicAuth]
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {k: URL(v) for k, v in getproxies().items()
if k in ('http', 'https')}
proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ("http", "https")}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme == 'https':
client_logger.warning(
"HTTPS proxies %s are not supported, ignoring", proxy)
if proxy.scheme == "https":
client_logger.warning("HTTPS proxies %s are not supported, ignoring", proxy)
continue
if netrc_obj and auth is None:
auth_from_netrc = None
@ -255,42 +267,47 @@ def proxies_from_env() -> Dict[str, ProxyInfo]:
return ret
def current_task(loop: Optional[asyncio.AbstractEventLoop]=None) -> asyncio.Task: # type: ignore # noqa # Return type is intentionally Generic here
def current_task(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
if PY_37:
return asyncio.current_task(loop=loop) # type: ignore
return asyncio.current_task(loop=loop)
else:
return asyncio.Task.current_task(loop=loop)
def get_running_loop(
loop: Optional[asyncio.AbstractEventLoop]=None
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
if loop is None:
loop = asyncio.get_event_loop()
if not loop.is_running():
warnings.warn("The object should be created from async function",
DeprecationWarning, stacklevel=3)
warnings.warn(
"The object should be created within an async function",
DeprecationWarning,
stacklevel=3,
)
if loop.get_debug():
internal_logger.warning(
"The object should be created from async function",
stack_info=True)
"The object should be created within an async function", stack_info=True
)
return loop
def isasyncgenfunction(obj: Any) -> bool:
func = getattr(inspect, 'isasyncgenfunction', None)
func = getattr(inspect, "isasyncgenfunction", None)
if func is not None:
return func(obj)
else:
return False
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
type = attr.ib(type=str)
subtype = attr.ib(type=str)
suffix = attr.ib(type=str)
parameters = attr.ib(type=MultiDictProxy) # type: MultiDictProxy[str]
type: str
subtype: str
suffix: str
parameters: "MultiDictProxy[str]"
@functools.lru_cache(maxsize=56)
@ -309,41 +326,48 @@ def parse_mimetype(mimetype: str) -> MimeType:
"""
if not mimetype:
return MimeType(type='', subtype='', suffix='',
parameters=MultiDictProxy(MultiDict()))
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(';')
parts = mimetype.split(";")
params = MultiDict() # type: MultiDict[str]
for item in parts[1:]:
if not item:
continue
key, value = cast(Tuple[str, str],
item.split('=', 1) if '=' in item else (item, ''))
key, value = cast(
Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
)
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == '*':
fulltype = '*/*'
if fulltype == "*":
fulltype = "*/*"
mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1))
if '/' in fulltype else (fulltype, ''))
stype, suffix = (cast(Tuple[str, str], stype.split('+', 1))
if '+' in stype else (stype, ''))
mtype, stype = (
cast(Tuple[str, str], fulltype.split("/", 1))
if "/" in fulltype
else (fulltype, "")
)
stype, suffix = (
cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
)
return MimeType(type=mtype, subtype=stype, suffix=suffix,
parameters=MultiDictProxy(params))
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
)
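A quick sketch of the parser on a suffixed media type:

from aiohttp.helpers import parse_mimetype

mt = parse_mimetype("application/hal+json; charset=utf-8")
assert (mt.type, mt.subtype, mt.suffix) == ("application", "hal", "json")
assert mt.parameters["charset"] == "utf-8"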
def guess_filename(obj: Any, default: Optional[str]=None) -> Optional[str]:
name = getattr(obj, 'name', None)
if name and isinstance(name, str) and name[0] != '<' and name[-1] != '>':
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
return Path(name).name
return default
def content_disposition_header(disptype: str,
quote_fields: bool=True,
**params: str) -> str:
def content_disposition_header(
disptype: str, quote_fields: bool = True, **params: str
) -> str:
"""Sets ``Content-Disposition`` header.
disptype is a disposition type: inline, attachment, form-data.
@ -352,26 +376,30 @@ def content_disposition_header(disptype: str,
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError('bad content disposition type {!r}'
''.format(disptype))
raise ValueError("bad content disposition type {!r}" "".format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError('bad content disposition parameter'
' {!r}={!r}'.format(key, val))
qval = quote(val, '') if quote_fields else val
raise ValueError(
"bad content disposition parameter" " {!r}={!r}".format(key, val)
)
qval = quote(val, "") if quote_fields else val
lparams.append((key, '"%s"' % qval))
if key == 'filename':
lparams.append(('filename*', "utf-8''" + qval))
sparams = '; '.join('='.join(pair) for pair in lparams)
value = '; '.join((value, sparams))
if key == "filename":
lparams.append(("filename*", "utf-8''" + qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value
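A sketch of the dual filename emission the loop above performs for non-ASCII names:

from aiohttp.helpers import content_disposition_header

value = content_disposition_header("attachment", filename="résumé.txt")
print(value)
# attachment; filename="r%C3%A9sum%C3%A9.txt"; filename*=utf-8''r%C3%A9sum%C3%A9.txt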
class reify:
class _TSelf(Protocol):
_cache: Dict[str, Any]
class reify(Generic[_T]):
"""Use as a class method decorator. It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
@ -380,12 +408,12 @@ class reify:
"""
def __init__(self, wrapped: Callable[..., Any]) -> None:
def __init__(self, wrapped: Callable[..., _T]) -> None:
self.wrapped = wrapped
self.__doc__ = wrapped.__doc__
self.name = wrapped.__name__
def __get__(self, inst: Any, owner: Any) -> Any:
def __get__(self, inst: _TSelf, owner: Optional[Type[Any]] = None) -> _T:
try:
try:
return inst._cache[self.name]
@ -398,7 +426,7 @@ class reify:
return self
raise
def __set__(self, inst: Any, value: Any) -> None:
def __set__(self, inst: _TSelf, value: _T) -> None:
raise AttributeError("reified property is read-only")
@ -406,31 +434,35 @@ reify_py = reify
try:
from ._helpers import reify as reify_c
if not NO_EXTENSIONS:
reify = reify_c # type: ignore
except ImportError:
pass
_ipv4_pattern = (r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')
_ipv4_pattern = (
r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
r'^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}'
r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)'
r'((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})'
r'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}'
r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}'
r'[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)'
r'(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}'
r':|:(:[A-F0-9]{1,4}){7})$')
r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii'))
_ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
def _is_ip_address(
regex: Pattern[str], regexb: Pattern[bytes],
host: Optional[Union[str, bytes]]) -> bool:
regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
if host is None:
return False
if isinstance(host, str):
@ -438,26 +470,22 @@ def _is_ip_address(
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
raise TypeError("{} [{}] is not a str or bytes"
.format(host, type(host)))
raise TypeError("{} [{}] is not a str or bytes".format(host, type(host)))
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
def is_ip_address(
host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
return is_ipv4_address(host) or is_ipv6_address(host)
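A sketch of the combined predicate, which accepts either str or bytes:

from aiohttp.helpers import is_ip_address

assert is_ip_address("127.0.0.1")
assert is_ip_address(b"::1")
assert not is_ip_address("example.com")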
def next_whole_second() -> datetime.datetime:
"""Return current time rounded up to the next whole second."""
return (
datetime.datetime.now(
datetime.timezone.utc).replace(microsecond=0) +
datetime.timedelta(seconds=0)
)
return datetime.datetime.now(datetime.timezone.utc).replace(
microsecond=0
) + datetime.timedelta(seconds=0)
_cached_current_datetime = None # type: Optional[int]
@ -474,13 +502,31 @@ def rfc822_formatted_time() -> str:
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = ("", # Dummy so we can use 1-based month numbers
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
_monthname = (
"", # Dummy so we can use 1-based month numbers
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
)
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd], day, _monthname[month], year, hh, mm, ss
_weekdayname[wd],
day,
_monthname[month],
year,
hh,
mm,
ss,
)
_cached_current_datetime = now
return _cached_formatted_datetime
@ -494,10 +540,10 @@ def _weakref_handle(info): # type: ignore
getattr(ob, name)()
def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): # type: ignore
def weakref_handle(ob, name, timeout, loop): # type: ignore
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if ceil_timeout:
if timeout >= 5:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
@ -505,35 +551,43 @@ def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): # type: ignore
def call_later(cb, timeout, loop): # type: ignore
if timeout is not None and timeout > 0:
when = ceil(loop.time() + timeout)
when = loop.time() + timeout
if timeout > 5:
when = ceil(when)
return loop.call_at(when, cb)
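The scheduling change above replaces unconditional rounding with a threshold: timeouts over five seconds are still rounded up to a whole second (letting the loop coalesce wakeups), while short timeouts keep sub-second precision. A standalone sketch of call_later's arithmetic:

from math import ceil

def when_to_fire(now: float, timeout: float) -> float:
    when = now + timeout
    if timeout > 5:
        when = ceil(when)   # whole-second boundary for long timeouts
    return when

assert when_to_fire(100.25, 0.5) == 100.75   # short timeout stays precise
assert when_to_fire(100.25, 10) == 111       # ceil(110.25) for long ones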
class TimeoutHandle:
""" Timeout handle """
def __init__(self,
loop: asyncio.AbstractEventLoop,
timeout: Optional[float]) -> None:
def __init__(
self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
) -> None:
self._timeout = timeout
self._loop = loop
self._callbacks = [] # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]] # noqa
self._callbacks = (
[]
) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
def register(self, callback: Callable[..., None],
*args: Any, **kwargs: Any) -> None:
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> Optional[asyncio.Handle]:
if self._timeout is not None and self._timeout > 0:
at = ceil(self._loop.time() + self._timeout)
return self._loop.call_at(at, self.__call__)
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
return None
def timer(self) -> 'BaseTimerContext':
def timer(self) -> "BaseTimerContext":
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
@ -549,19 +603,21 @@ class TimeoutHandle:
self._callbacks.clear()
class BaseTimerContext(ContextManager['BaseTimerContext']):
class BaseTimerContext(ContextManager["BaseTimerContext"]):
pass
class TimerNoop(BaseTimerContext):
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
return False
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
return
class TimerContext(BaseTimerContext):
@ -576,8 +632,9 @@ class TimerContext(BaseTimerContext):
task = current_task(loop=self._loop)
if task is None:
raise RuntimeError('Timeout context manager should be used '
'inside a task')
raise RuntimeError(
"Timeout context manager should be used " "inside a task"
)
if self._cancelled:
task.cancel()
@ -586,9 +643,12 @@ class TimerContext(BaseTimerContext):
self._tasks.append(task)
return self
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> Optional[bool]:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
if self._tasks:
self._tasks.pop()
@ -605,22 +665,25 @@ class TimerContext(BaseTimerContext):
class CeilTimeout(async_timeout.timeout):
def __enter__(self) -> async_timeout.timeout:
if self._timeout is not None:
self._task = current_task(loop=self._loop)
if self._task is None:
raise RuntimeError(
'Timeout context manager should be used inside a task')
self._cancel_handler = self._loop.call_at(
ceil(self._loop.time() + self._timeout), self._cancel_task)
"Timeout context manager should be used inside a task"
)
now = self._loop.time()
delay = self._timeout
when = now + delay
if delay > 5:
when = ceil(when)
self._cancel_handler = self._loop.call_at(when, self._cancel_task)
return self
class HeadersMixin:
ATTRS = frozenset([
'_content_type', '_content_dict', '_stored_content_type'])
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
_content_type = None # type: Optional[str]
_content_dict = None # type: Optional[Dict[str, str]]
@ -630,7 +693,7 @@ class HeadersMixin:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = 'application/octet-stream'
self._content_type = "application/octet-stream"
self._content_dict = {}
else:
self._content_type, self._content_dict = cgi.parse_header(raw)
@ -649,7 +712,7 @@ class HeadersMixin:
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get('charset') # type: ignore
return self._content_dict.get("charset") # type: ignore
@property
def content_length(self) -> Optional[int]:
@ -662,25 +725,27 @@ class HeadersMixin:
return None
def set_result(fut: 'asyncio.Future[_T]', result: _T) -> None:
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
if not fut.done():
fut.set_result(result)
def set_exception(fut: 'asyncio.Future[_T]', exc: BaseException) -> None:
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
if not fut.done():
fut.set_exception(exc)
class ChainMapProxy(Mapping[str, Any]):
__slots__ = ('_maps',)
__slots__ = ("_maps",)
def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError("Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__))
raise TypeError(
"Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__)
)
def __getitem__(self, key: str) -> Any:
for mapping in self._maps:
@ -690,7 +755,7 @@ class ChainMapProxy(Mapping[str, Any]):
pass
raise KeyError(key)
def get(self, key: str, default: Any=None) -> Any:
def get(self, key: str, default: Any = None) -> Any:
return self[key] if key in self else default
def __len__(self) -> int:
@ -712,4 +777,4 @@ class ChainMapProxy(Mapping[str, Any]):
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return 'ChainMapProxy({})'.format(content)
return f"ChainMapProxy({content})"

View File

@ -1,50 +1,72 @@
import http.server
import sys
from typing import Mapping, Tuple # noqa
from typing import Mapping, Tuple
from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import HeadersParser as HeadersParser
from .http_parser import HttpParser as HttpParser
from .http_parser import HttpRequestParser as HttpRequestParser
from .http_parser import HttpResponseParser as HttpResponseParser
from .http_parser import RawRequestMessage as RawRequestMessage
from .http_parser import RawResponseMessage as RawResponseMessage
from .http_websocket import WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE
from .http_websocket import WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE
from .http_websocket import WS_KEY as WS_KEY
from .http_websocket import WebSocketError as WebSocketError
from .http_websocket import WebSocketReader as WebSocketReader
from .http_websocket import WebSocketWriter as WebSocketWriter
from .http_websocket import WSCloseCode as WSCloseCode
from .http_websocket import WSMessage as WSMessage
from .http_websocket import WSMsgType as WSMsgType
from .http_websocket import ws_ext_gen as ws_ext_gen
from .http_websocket import ws_ext_parse as ws_ext_parse
from .http_writer import HttpVersion as HttpVersion
from .http_writer import HttpVersion10 as HttpVersion10
from .http_writer import HttpVersion11 as HttpVersion11
from .http_writer import StreamWriter as StreamWriter
from .http_parser import (
HeadersParser as HeadersParser,
HttpParser as HttpParser,
HttpRequestParser as HttpRequestParser,
HttpResponseParser as HttpResponseParser,
RawRequestMessage as RawRequestMessage,
RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
WS_KEY as WS_KEY,
WebSocketError as WebSocketError,
WebSocketReader as WebSocketReader,
WebSocketWriter as WebSocketWriter,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen as ws_ext_gen,
ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
StreamWriter as StreamWriter,
)
__all__ = (
'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE',
"HttpProcessingError",
"RESPONSES",
"SERVER_SOFTWARE",
# .http_writer
'StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11',
"StreamWriter",
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
# .http_parser
'HeadersParser', 'HttpParser',
'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage',
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
# .http_websocket
'WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'ws_ext_gen', 'ws_ext_parse',
'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode',
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"ws_ext_gen",
"ws_ext_parse",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format(
sys.version_info, __version__) # type: str
SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
sys.version_info, __version__
) # type: str
RESPONSES = http.server.BaseHTTPRequestHandler.responses # type: Mapping[int, Tuple[str, str]] # noqa
RESPONSES = (
http.server.BaseHTTPRequestHandler.responses
) # type: Mapping[int, Tuple[str, str]]

View File

@ -5,7 +5,7 @@ from typing import Optional, Union
from .typedefs import _CIMultiDict
__all__ = ('HttpProcessingError',)
__all__ = ("HttpProcessingError",)
class HttpProcessingError(Exception):
@ -19,32 +19,34 @@ class HttpProcessingError(Exception):
"""
code = 0
message = ''
message = ""
headers = None
def __init__(self, *,
code: Optional[int]=None,
message: str='',
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(
self,
*,
code: Optional[int] = None,
message: str = "",
headers: Optional[_CIMultiDict] = None,
) -> None:
if code is not None:
self.code = code
self.headers = headers
self.message = message
def __str__(self) -> str:
return "%s, message=%r" % (self.code, self.message)
return f"{self.code}, message={self.message!r}"
def __repr__(self) -> str:
return "<%s: %s>" % (self.__class__.__name__, self)
return f"<{self.__class__.__name__}: {self}>"
class BadHttpMessage(HttpProcessingError):
code = 400
message = 'Bad Request'
message = "Bad Request"
def __init__(self, message: str, *,
headers: Optional[_CIMultiDict]=None) -> None:
def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
super().__init__(message=message, headers=headers)
self.args = (message,)
@ -52,7 +54,7 @@ class BadHttpMessage(HttpProcessingError):
class HttpBadRequest(BadHttpMessage):
code = 400
message = 'Bad Request'
message = "Bad Request"
class PayloadEncodingError(BadHttpMessage):
@ -72,37 +74,32 @@ class ContentLengthError(PayloadEncodingError):
class LineTooLong(BadHttpMessage):
def __init__(self, line: str,
limit: str='Unknown',
actual_size: str='Unknown') -> None:
def __init__(
self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
) -> None:
super().__init__(
"Got more than %s bytes (%s) when reading %s." % (
limit, actual_size, line))
f"Got more than {limit} bytes ({actual_size}) when reading {line}."
)
self.args = (line, limit, actual_size)
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr: Union[bytes, str]) -> None:
if isinstance(hdr, bytes):
hdr = hdr.decode('utf-8', 'surrogateescape')
super().__init__('Invalid HTTP Header: {}'.format(hdr))
hdr = hdr.decode("utf-8", "surrogateescape")
super().__init__(f"Invalid HTTP Header: {hdr}")
self.hdr = hdr
self.args = (hdr,)
class BadStatusLine(BadHttpMessage):
def __init__(self, line: str='') -> None:
def __init__(self, line: str = "") -> None:
if not isinstance(line, str):
line = repr(line)
super().__init__(f"Bad status line {line!r}")
self.args = (line,)
self.line = line
__str__ = Exception.__str__
__repr__ = Exception.__repr__
class InvalidURLError(BadHttpMessage):
pass

View File

@ -5,7 +5,7 @@ import re
import string
import zlib
from enum import IntEnum
from typing import Any, List, Optional, Tuple, Type, Union # noqa
from typing import Any, List, Optional, Tuple, Type, Union
from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL
@ -28,14 +28,20 @@ from .typedefs import RawHeaders
try:
import brotli
HAS_BROTLI = True
except ImportError: # pragma: no cover
HAS_BROTLI = False
__all__ = (
'HeadersParser', 'HttpParser', 'HttpRequestParser', 'HttpResponseParser',
'RawRequestMessage', 'RawResponseMessage')
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
)
ASCIISET = set(string.printable)
@ -47,18 +53,39 @@ ASCIISET = set(string.printable)
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE = re.compile(r'HTTP/(\d+).(\d+)')
HDRRE = re.compile(rb'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]')
VERSRE = re.compile(r"HTTP/(\d+).(\d+)")
HDRRE = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
RawRequestMessage = collections.namedtuple(
'RawRequestMessage',
['method', 'path', 'version', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked', 'url'])
"RawRequestMessage",
[
"method",
"path",
"version",
"headers",
"raw_headers",
"should_close",
"compression",
"upgrade",
"chunked",
"url",
],
)
RawResponseMessage = collections.namedtuple(
'RawResponseMessage',
['version', 'code', 'reason', 'headers', 'raw_headers',
'should_close', 'compression', 'upgrade', 'chunked'])
"RawResponseMessage",
[
"version",
"code",
"reason",
"headers",
"raw_headers",
"should_close",
"compression",
"upgrade",
"chunked",
],
)
class ParseState(IntEnum):
@ -78,18 +105,19 @@ class ChunkState(IntEnum):
class HeadersParser:
def __init__(self,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190) -> None:
def __init__(
self,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
) -> None:
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]', RawHeaders]:
self, lines: List[bytes]
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
headers = CIMultiDict() # type: CIMultiDict[str]
raw_headers = []
@ -100,20 +128,22 @@ class HeadersParser:
while line:
# Parse initial header name : value pair.
try:
bname, bvalue = line.split(b':', 1)
bname, bvalue = line.split(b":", 1)
except ValueError:
raise InvalidHeader(line) from None
bname = bname.strip(b' \t')
bname = bname.strip(b" \t")
bvalue = bvalue.lstrip()
if HDRRE.search(bname):
raise InvalidHeader(bname)
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
bname.decode("utf8", "xmlcharrefreplace")),
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(len(bname)))
str(len(bname)),
)
header_length = len(bvalue)
@ -130,10 +160,12 @@ class HeadersParser:
header_length += len(line)
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length))
str(header_length),
)
bvalue_lst.append(line)
# next line
@ -143,20 +175,22 @@ class HeadersParser:
if line:
continuation = line[0] in (32, 9) # (' ', '\t')
else:
line = b''
line = b""
break
bvalue = b''.join(bvalue_lst)
bvalue = b"".join(bvalue_lst)
else:
if header_length > self.max_field_size:
raise LineTooLong(
'request header field {}'.format(
bname.decode("utf8", "xmlcharrefreplace")),
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length))
str(header_length),
)
bvalue = bvalue.strip()
name = bname.decode('utf-8', 'surrogateescape')
value = bvalue.decode('utf-8', 'surrogateescape')
name = bname.decode("utf-8", "surrogateescape")
value = bvalue.decode("utf-8", "surrogateescape")
headers.add(name, value)
raw_headers.append((bname, bvalue))
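A sketch of the parser's contract: index 0 of lines is the start line, which parse_headers skips, and the list must end with an empty line as terminator.

from aiohttp.http_parser import HeadersParser

lines = [
    b"GET / HTTP/1.1",    # start line, skipped by parse_headers
    b"Host: example.com",
    b"Content-Length: 0",
    b"",                  # terminator
]
headers, raw_headers = HeadersParser().parse_headers(lines)
assert headers["host"] == "example.com"   # CIMultiDict: case-insensitive
assert raw_headers == ((b"Host", b"example.com"), (b"Content-Length", b"0"))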
@ -165,20 +199,23 @@ class HeadersParser:
class HttpParser(abc.ABC):
def __init__(self, protocol: Optional[BaseProtocol]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
max_line_size: int=8190,
max_headers: int=32768,
max_field_size: int=8190,
timer: Optional[BaseTimerContext]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
payload_exception: Optional[Type[BaseException]]=None,
response_with_body: bool=True,
read_until_eof: bool=False,
auto_decompress: bool=True) -> None:
def __init__(
self,
protocol: Optional[BaseProtocol] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
limit: int = 2 ** 16,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
timer: Optional[BaseTimerContext] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
payload_exception: Optional[Type[BaseException]] = None,
response_with_body: bool = True,
read_until_eof: bool = False,
auto_decompress: bool = True,
) -> None:
self.protocol = protocol
self.loop = loop
self.max_line_size = max_line_size
@ -193,14 +230,13 @@ class HttpParser(abc.ABC):
self.read_until_eof = read_until_eof
self._lines = [] # type: List[bytes]
self._tail = b''
self._tail = b""
self._upgraded = False
self._payload = None
self._payload_parser = None # type: Optional[HttpPayloadParser]
self._auto_decompress = auto_decompress
self._headers_parser = HeadersParser(max_line_size,
max_headers,
max_field_size)
self._limit = limit
self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
@abc.abstractmethod
def parse_message(self, lines: List[bytes]) -> Any:
@ -216,27 +252,27 @@ class HttpParser(abc.ABC):
self._lines.append(self._tail)
if self._lines:
if self._lines[-1] != '\r\n':
self._lines.append(b'')
if self._lines[-1] != "\r\n":
self._lines.append(b"")
try:
return self.parse_message(self._lines)
except Exception:
return None
def feed_data(
self,
data: bytes,
SEP: bytes=b'\r\n',
EMPTY: bytes=b'',
CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH,
METH_CONNECT: str=hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr=hdrs.SEC_WEBSOCKET_KEY1
self,
data: bytes,
SEP: bytes = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
) -> Tuple[List[Any], bool, bytes]:
messages = []
if self._tail:
data, self._tail = self._tail + data, b''
data, self._tail = self._tail + data, b""
data_len = len(data)
start_pos = 0
@ -281,43 +317,72 @@ class HttpParser(abc.ABC):
self._upgraded = msg.upgrade
method = getattr(msg, 'method', self.method)
method = getattr(msg, "method", self.method)
assert self.protocol is not None
# calculate payload
if ((length is not None and length > 0) or
msg.chunked and not msg.upgrade):
if (
(length is not None and length > 0)
or msg.chunked
and not msg.upgrade
):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code, readall=self.readall,
code=self.code,
readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
self._upgraded = True
self._payload_parser = HttpPayloadParser(
payload, method=msg.method,
compression=msg.compression, readall=True,
auto_decompress=self._auto_decompress)
payload,
method=msg.method,
compression=msg.compression,
readall=True,
auto_decompress=self._auto_decompress,
)
else:
if (getattr(msg, 'code', 100) >= 199 and
length is None and self.read_until_eof):
if (
getattr(msg, "code", 100) >= 199
and length is None
and self.read_until_eof
):
payload = StreamReader(
self.protocol, timer=self.timer, loop=loop)
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload, length=length,
chunked=msg.chunked, method=method,
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code, readall=True,
code=self.code,
readall=True,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress)
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
else:
@ -339,17 +404,17 @@ class HttpParser(abc.ABC):
assert not self._lines
assert self._payload_parser is not None
try:
eof, data = self._payload_parser.feed_data(
data[start_pos:])
eof, data = self._payload_parser.feed_data(data[start_pos:])
except BaseException as exc:
if self.payload_exception is not None:
self._payload_parser.payload.set_exception(
self.payload_exception(str(exc)))
self.payload_exception(str(exc))
)
else:
self._payload_parser.payload.set_exception(exc)
eof = True
data = b''
data = b""
if eof:
start_pos = 0
@ -367,14 +432,10 @@ class HttpParser(abc.ABC):
return messages, self._upgraded, data
def parse_headers(
self,
lines: List[bytes]
) -> Tuple['CIMultiDictProxy[str]',
RawHeaders,
Optional[bool],
Optional[str],
bool,
bool]:
self, lines: List[bytes]
) -> Tuple[
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
]:
"""Parses RFC 5322 headers from a stream.
Line continuations are supported. Returns list of header name
@ -390,27 +451,33 @@ class HttpParser(abc.ABC):
conn = headers.get(hdrs.CONNECTION)
if conn:
v = conn.lower()
if v == 'close':
if v == "close":
close_conn = True
elif v == 'keep-alive':
elif v == "keep-alive":
close_conn = False
elif v == 'upgrade':
elif v == "upgrade":
upgrade = True
# encoding
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
if enc in ('gzip', 'deflate', 'br'):
if enc in ("gzip", "deflate", "br"):
encoding = enc
# chunking
te = headers.get(hdrs.TRANSFER_ENCODING)
if te and 'chunked' in te.lower():
if te and "chunked" in te.lower():
chunked = True
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
def set_upgraded(self, val: bool) -> None:
"""Set connection upgraded (to websocket) mode.
:param bool val: new state.
"""
self._upgraded = val
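
For orientation, the connection flags computed by parse_headers above reduce to a few header lookups. A rough standalone sketch over a plain dict (not the real method, which also returns the header multidicts):

def connection_flags(headers):
    conn = headers.get("Connection", "").lower()
    close = {"close": True, "keep-alive": False}.get(conn)  # None if absent
    upgrade = conn == "upgrade"
    enc = headers.get("Content-Encoding", "").lower()
    encoding = enc if enc in ("gzip", "deflate", "br") else None
    chunked = "chunked" in headers.get("Transfer-Encoding", "").lower()
    return close, encoding, upgrade, chunked

assert connection_flags(
    {"Connection": "keep-alive", "Transfer-Encoding": "chunked"}
) == (False, None, False, True)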
class HttpRequestParser(HttpParser):
"""Read request status line. Exception .http_exceptions.BadStatusLine
@ -420,7 +487,7 @@ class HttpRequestParser(HttpParser):
def parse_message(self, lines: List[bytes]) -> Any:
# request line
line = lines[0].decode('utf-8', 'surrogateescape')
line = lines[0].decode("utf-8", "surrogateescape")
try:
method, path, version = line.split(None, 2)
except ValueError:
@ -428,9 +495,11 @@ class HttpRequestParser(HttpParser):
if len(path) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(path)))
"Status line is too long", str(self.max_line_size), str(len(path))
)
path_part, _hash_separator, url_fragment = path.partition("#")
path_part, _question_mark_separator, qs_part = path_part.partition("?")
# method
if not METHRE.match(method):
@ -438,8 +507,8 @@ class HttpRequestParser(HttpParser):
# version
try:
if version.startswith('HTTP/'):
n1, n2 = version[5:].split('.', 1)
if version.startswith("HTTP/"):
n1, n2 = version[5:].split(".", 1)
version_o = HttpVersion(int(n1), int(n2))
else:
raise BadStatusLine(version)
@ -447,8 +516,14 @@ class HttpRequestParser(HttpParser):
raise BadStatusLine(version)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must explicitly ask not to close
@ -457,8 +532,26 @@ class HttpRequestParser(HttpParser):
close = False
return RawRequestMessage(
method, path, version_o, headers, raw_headers,
close, compression, upgrade, chunked, URL(path))
method,
path,
version_o,
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
# NOTE: parser does, otherwise it results into the same
# NOTE: HTTP Request-Line input producing different
# NOTE: `yarl.URL()` objects
URL.build(
path=path_part,
query_string=qs_part,
fragment=url_fragment,
encoded=True,
),
)
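
The request-target handling above can be replayed in isolation; yarl is aiohttp's real URL dependency, and per the NOTE comments the encoded=True build mimics the Cython-based parser:

from yarl import URL

target = "/search?q=cats#results"
path, _, fragment = target.partition("#")
path, _, query_string = path.partition("?")
url = URL.build(
    path=path, query_string=query_string, fragment=fragment, encoded=True
)
assert str(url) == "/search?q=cats#results"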
class HttpResponseParser(HttpParser):
@ -468,7 +561,7 @@ class HttpResponseParser(HttpParser):
Returns RawResponseMessage"""
def parse_message(self, lines: List[bytes]) -> Any:
line = lines[0].decode('utf-8', 'surrogateescape')
line = lines[0].decode("utf-8", "surrogateescape")
try:
version, status = line.split(None, 1)
except ValueError:
@ -477,13 +570,12 @@ class HttpResponseParser(HttpParser):
try:
status, reason = status.split(None, 1)
except ValueError:
reason = ''
reason = ""
if len(reason) > self.max_line_size:
raise LineTooLong(
'Status line is too long',
str(self.max_line_size),
str(len(reason)))
"Status line is too long", str(self.max_line_size), str(len(reason))
)
# version
match = VERSRE.match(version)
@ -501,39 +593,57 @@ class HttpResponseParser(HttpParser):
raise BadStatusLine(line)
# read headers
(headers, raw_headers,
close, compression, upgrade, chunked) = self.parse_headers(lines)
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
if close is None:
close = version_o <= HttpVersion10
return RawResponseMessage(
version_o, status_i, reason.strip(),
headers, raw_headers, close, compression, upgrade, chunked)
version_o,
status_i,
reason.strip(),
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
)
class HttpPayloadParser:
def __init__(self, payload: StreamReader,
length: Optional[int]=None,
chunked: bool=False,
compression: Optional[str]=None,
code: Optional[int]=None,
method: Optional[str]=None,
readall: bool=False,
response_with_body: bool=True,
auto_decompress: bool=True) -> None:
def __init__(
self,
payload: StreamReader,
length: Optional[int] = None,
chunked: bool = False,
compression: Optional[str] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
response_with_body: bool = True,
auto_decompress: bool = True,
) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b''
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
real_payload = DeflateBuffer(payload, compression) # type: Union[StreamReader, DeflateBuffer] # noqa
real_payload = DeflateBuffer(
payload, compression
) # type: Union[StreamReader, DeflateBuffer]
else:
real_payload = payload
@ -555,9 +665,10 @@ class HttpPayloadParser:
else:
if readall and code != 204:
self._type = ParseState.PARSE_UNTIL_EOF
elif method in ('PUT', 'POST'):
elif method in ("PUT", "POST"):
internal_logger.warning( # pragma: no cover
'Content-Length or Transfer-Encoding header is required')
"Content-Length or Transfer-Encoding header is required"
)
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
@ -569,15 +680,16 @@ class HttpPayloadParser:
self.payload.feed_eof()
elif self._type == ParseState.PARSE_LENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header.")
"Not enough data for satisfy content length header."
)
elif self._type == ParseState.PARSE_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header.")
"Not enough data for satisfy transfer length header."
)
def feed_data(self,
chunk: bytes,
SEP: bytes=b'\r\n',
CHUNK_EXT: bytes=b';') -> Tuple[bool, bytes]:
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
@ -588,7 +700,7 @@ class HttpPayloadParser:
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b''
return True, b""
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
@ -599,7 +711,7 @@ class HttpPayloadParser:
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b''
self._chunk_tail = b""
while chunk:
@ -617,11 +729,12 @@ class HttpPayloadParser:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode('ascii', 'surrogateescape'))
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos+2:]
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
@ -630,7 +743,7 @@ class HttpPayloadParser:
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
@ -640,7 +753,7 @@ class HttpPayloadParser:
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b''
return False, b""
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
@ -655,34 +768,45 @@ class HttpPayloadParser:
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# if the stream does not contain a trailer, another \r\n should
# follow the 0\r\n; otherwise trailers need to be skipped
# until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
if chunk[:2] == SEP:
head = chunk[:2]
if head == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
else:
self._chunk = ChunkState.PARSE_TRAILERS
# Both CR and LF, or only LF, may not have been received yet.
# CRLF or LF is expected as the very first bytes next time;
# otherwise trailers should follow. The final CRLF that marks
# the end of the response might not arrive in the same TCP
# segment that delivered the size indicator.
if not head:
return False, b""
if head == SEP[:1]:
self._chunk_tail = head
return False, b""
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos+2:]
chunk = chunk[pos + 2 :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b''
return False, b""
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b''
return False, b""
class DeflateBuffer:
@ -694,38 +818,48 @@ class DeflateBuffer:
self.encoding = encoding
self._started_decoding = False
if encoding == 'br':
if encoding == "br":
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
'Can not decode content-encoding: brotli (br). '
'Please install `brotlipy`')
"Can not decode content-encoding: brotli (br). "
"Please install `brotlipy`"
)
self.decompressor = brotli.Decompressor()
else:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self.decompressor = zlib.decompressobj(wbits=zlib_mode)
def set_exception(self, exc: BaseException) -> None:
self.out.set_exception(exc)
def feed_data(self, chunk: bytes, size: int) -> None:
if not size:
return
self.size += size
# RFC1950
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
# bits 4..7 = CINFO = 1..7 = window size.
if (
not self._started_decoding
and self.encoding == "deflate"
and chunk[0] & 0xF != 8
):
# Change the decoder to decompress incorrectly compressed data
# Actually we should issue a warning about non-RFC-compliant data.
self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
if not self._started_decoding and self.encoding == 'deflate':
self.decompressor = zlib.decompressobj()
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
else:
raise ContentEncodingError(
'Can not decode content-encoding: %s' % self.encoding)
raise ContentEncodingError(
"Can not decode content-encoding: %s" % self.encoding
)
self._started_decoding = True
if chunk:
self._started_decoding = True
self.out.feed_data(chunk, len(chunk))
def feed_eof(self) -> None:
@ -733,8 +867,8 @@ class DeflateBuffer:
if chunk or self.size > 0:
self.out.feed_data(chunk, len(chunk))
if self.encoding == 'deflate' and not self.decompressor.eof:
raise ContentEncodingError('deflate')
if self.encoding == "deflate" and not self.decompressor.eof:
raise ContentEncodingError("deflate")
self.out.feed_eof()
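
The RFC 1950 check above is easy to observe with the stdlib: a zlib-wrapped stream starts with a CM nibble of 8, while headerless raw deflate (what the fallback decompressor expects) does not:

import zlib

wrapped = zlib.compress(b"payload")  # zlib-wrapped deflate, RFC 1950 header
co = zlib.compressobj(wbits=-zlib.MAX_WBITS)
raw = co.compress(b"payload") + co.flush()  # headerless raw deflate

assert wrapped[0] & 0xF == 8  # CM == 8, "deflate"
assert raw[0] & 0xF != 8      # first byte is block data, not a zlib header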
@ -752,10 +886,13 @@ RawResponseMessagePy = RawResponseMessage
try:
if not NO_EXTENSIONS:
from ._http_parser import (HttpRequestParser, # type: ignore # noqa
HttpResponseParser,
RawRequestMessage,
RawResponseMessage)
from ._http_parser import ( # type: ignore
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
RawResponseMessage,
)
HttpRequestParserC = HttpRequestParser
HttpResponseParserC = HttpResponseParser
RawRequestMessageC = RawRequestMessage

View File

@ -13,12 +13,19 @@ from typing import Any, Callable, List, Optional, Tuple, Union
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .log import ws_logger
from .streams import DataQueue
__all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY',
'WebSocketReader', 'WebSocketWriter', 'WSMessage',
'WebSocketError', 'WSMsgType', 'WSCloseCode')
__all__ = (
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
class WSCloseCode(IntEnum):
@ -44,7 +51,7 @@ class WSMsgType(IntEnum):
TEXT = 0x1
BINARY = 0x2
PING = 0x9
PONG = 0xa
PONG = 0xA
CLOSE = 0x8
# aiohttp specific types
@ -62,28 +69,25 @@ class WSMsgType(IntEnum):
error = ERROR
WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
UNPACK_LEN2 = Struct('!H').unpack_from
UNPACK_LEN3 = Struct('!Q').unpack_from
UNPACK_CLOSE_CODE = Struct('!H').unpack
PACK_LEN1 = Struct('!BB').pack
PACK_LEN2 = Struct('!BBH').pack
PACK_LEN3 = Struct('!BBQ').pack
PACK_CLOSE_CODE = Struct('!H').pack
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE = 2 ** 14
DEFAULT_LIMIT = 2 ** 16
_WSMessageBase = collections.namedtuple('_WSMessageBase',
['type', 'data', 'extra'])
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
class WSMessage(_WSMessageBase):
def json(self, *,
loads: Callable[[Any], Any]=json.loads) -> Any:
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
"""Return parsed JSON data.
.. versionadded:: 0.22
@ -146,23 +150,26 @@ if NO_EXTENSIONS: # pragma: no cover
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
_websocket_mask = _websocket_mask_python
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff])
_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])
_WS_EXT_RE = re.compile(r'^(?:;\s*(?:'
r'(server_no_context_takeover)|'
r'(client_no_context_takeover)|'
r'(server_max_window_bits(?:=(\d+))?)|'
r'(client_max_window_bits(?:=(\d+))?)))*$')
_WS_EXT_RE = re.compile(
r"^(?:;\s*(?:"
r"(server_no_context_takeover)|"
r"(client_no_context_takeover)|"
r"(server_max_window_bits(?:=(\d+))?)|"
r"(client_max_window_bits(?:=(\d+))?)))*$"
)
_WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?')
_WS_EXT_RE_SPLIT = re.compile(r"permessage-deflate([^,]+)?")
def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
if not extstr:
return 0, False
@ -201,37 +208,38 @@ def ws_ext_parse(extstr: str, isserver: bool=False) -> Tuple[int, bool]:
# If the compression level is not supported,
# fail the parsing process
if compress > 15 or compress < 9:
raise WSHandshakeError('Invalid window size')
raise WSHandshakeError("Invalid window size")
if match.group(2):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
# Fail if on the client side and the extension did not match
elif not isserver:
raise WSHandshakeError('Extension for deflate not supported' +
ext.group(1))
raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
return compress, notakeover
def ws_ext_gen(compress: int=15, isserver: bool=False,
server_notakeover: bool=False) -> str:
def ws_ext_gen(
compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
# client_notakeover=False is not used for the server
# zlib does not support compress wbits=8
if compress < 9 or compress > 15:
raise ValueError('Compress wbits must between 9 and 15, '
'zlib does not support wbits=8')
enabledext = ['permessage-deflate']
raise ValueError(
    "Compress wbits must be between 9 and 15; zlib does not support wbits=8"
)
enabledext = ["permessage-deflate"]
if not isserver:
enabledext.append('client_max_window_bits')
enabledext.append("client_max_window_bits")
if compress < 15:
enabledext.append('server_max_window_bits=' + str(compress))
enabledext.append("server_max_window_bits=" + str(compress))
if server_notakeover:
enabledext.append('server_no_context_takeover')
enabledext.append("server_no_context_takeover")
# if client_notakeover:
# enabledext.append('client_no_context_takeover')
return '; '.join(enabledext)
return "; ".join(enabledext)
class WSParserState(IntEnum):
@ -242,9 +250,9 @@ class WSParserState(IntEnum):
class WebSocketReader:
def __init__(self, queue: DataQueue[WSMessage],
max_msg_size: int, compress: bool=True) -> None:
def __init__(
self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
) -> None:
self.queue = queue
self._max_msg_size = max_msg_size
@ -257,7 +265,7 @@ class WebSocketReader:
self._frame_opcode = None # type: Optional[int]
self._frame_payload = bytearray()
self._tail = b''
self._tail = b""
self._has_mask = False
self._frame_mask = None # type: Optional[bytes]
self._payload_length = 0
@ -278,7 +286,7 @@ class WebSocketReader:
except Exception as exc:
self._exc = exc
self.queue.set_exception(exc)
return True, b''
return True, b""
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
@ -287,41 +295,45 @@ class WebSocketReader:
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
if (close_code < 3000 and
close_code not in ALLOWED_CLOSE_CODES):
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close code: {}'.format(close_code))
f"Invalid close code: {close_code}",
)
try:
close_message = payload[2:].decode('utf-8')
close_message = payload[2:].decode("utf-8")
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
elif payload:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Invalid close frame: {} {} {!r}'.format(
fin, opcode, payload))
f"Invalid close frame: {fin} {opcode} {payload!r}",
)
else:
msg = WSMessage(WSMsgType.CLOSE, 0, '')
msg = WSMessage(WSMsgType.CLOSE, 0, "")
self.queue.feed_data(msg, 0)
elif opcode == WSMsgType.PING:
self.queue.feed_data(
WSMessage(WSMsgType.PING, payload, ''), len(payload))
WSMessage(WSMsgType.PING, payload, ""), len(payload)
)
elif opcode == WSMsgType.PONG:
self.queue.feed_data(
WSMessage(WSMsgType.PONG, payload, ''), len(payload))
WSMessage(WSMsgType.PONG, payload, ""), len(payload)
)
elif opcode not in (
WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None:
elif (
opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
and self._opcode is None
):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Unexpected opcode={!r}".format(opcode))
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
)
else:
# load text/binary
if not fin:
@ -329,12 +341,13 @@ class WebSocketReader:
if opcode != WSMsgType.CONTINUATION:
self._opcode = opcode
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
len(self._partial), self._max_msg_size
),
)
else:
# previous frame was non finished
# we should get continuation opcode
@ -342,8 +355,9 @@ class WebSocketReader:
if opcode != WSMsgType.CONTINUATION:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'The opcode in non-fin frame is expected '
'to be zero, got {!r}'.format(opcode))
"The opcode in non-fin frame is expected "
"to be zero, got {!r}".format(opcode),
)
if opcode == WSMsgType.CONTINUATION:
assert self._opcode is not None
@ -351,28 +365,28 @@ class WebSocketReader:
self._opcode = None
self._partial.extend(payload)
if (self._max_msg_size and
len(self._partial) >= self._max_msg_size):
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size))
len(self._partial), self._max_msg_size
),
)
# Decompression must be done after all packets have been
# received.
if compressed:
self._partial.extend(_WS_DEFLATE_TRAILING)
payload_merged = self._decompressobj.decompress(
self._partial, self._max_msg_size)
self._partial, self._max_msg_size
)
if self._decompressobj.unconsumed_tail:
left = len(self._decompressobj.unconsumed_tail)
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Decompressed message size {} exceeds limit {}"
.format(
self._max_msg_size + left,
self._max_msg_size
)
"Decompressed message size {} exceeds limit {}".format(
self._max_msg_size + left, self._max_msg_size
),
)
else:
payload_merged = bytes(self._partial)
@ -381,27 +395,29 @@ class WebSocketReader:
if opcode == WSMsgType.TEXT:
try:
text = payload_merged.decode('utf-8')
text = payload_merged.decode("utf-8")
self.queue.feed_data(
WSMessage(WSMsgType.TEXT, text, ''), len(text))
WSMessage(WSMsgType.TEXT, text, ""), len(text)
)
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT,
'Invalid UTF-8 text message') from exc
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
else:
self.queue.feed_data(
WSMessage(WSMsgType.BINARY, payload_merged, ''),
len(payload_merged))
WSMessage(WSMsgType.BINARY, payload_merged, ""),
len(payload_merged),
)
return False, b''
return False, b""
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int],
bytearray,
Optional[bool]]]:
def parse_frame(
self, buf: bytes
) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
"""Return the next frame from the socket."""
frames = []
if self._tail:
buf, self._tail = self._tail + buf, b''
buf, self._tail = self._tail + buf, b""
start_pos = 0
buf_length = len(buf)
@ -410,7 +426,7 @@ class WebSocketReader:
# read header
if self._state == WSParserState.READ_HEADER:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
data = buf[start_pos : start_pos + 2]
start_pos += 2
first_byte, second_byte = data
@ -418,7 +434,7 @@ class WebSocketReader:
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
opcode = first_byte & 0xF
# frame-fin = %x0 ; more frames of this message follow
# / %x1 ; final frame of this message
@ -433,23 +449,25 @@ class WebSocketReader:
if rsv2 or rsv3 or (rsv1 and not self._compress):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
"Received frame with non-zero reserved bits",
)
if opcode > 0x7 and fin == 0:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received fragmented control frame')
"Received fragmented control frame",
)
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7f
length = second_byte & 0x7F
# Control frames MUST have a payload
# length of 125 bytes or less
if opcode > 0x7 and length > 125:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Control frame payload cannot be '
'larger than 125 bytes')
"Control frame payload cannot be " "larger than 125 bytes",
)
# Set compress status if the last frame is FIN
# or if this is the first fragment
@ -459,7 +477,8 @@ class WebSocketReader:
elif rsv1:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
'Received frame with non-zero reserved bits')
"Received frame with non-zero reserved bits",
)
self._frame_fin = bool(fin)
self._frame_opcode = opcode
@ -474,26 +493,28 @@ class WebSocketReader:
length = self._payload_length_flag
if length == 126:
if buf_length - start_pos >= 2:
data = buf[start_pos:start_pos+2]
data = buf[start_pos : start_pos + 2]
start_pos += 2
length = UNPACK_LEN2(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
else:
break
elif length > 126:
if buf_length - start_pos >= 8:
data = buf[start_pos:start_pos+8]
data = buf[start_pos : start_pos + 8]
start_pos += 8
length = UNPACK_LEN3(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
else:
break
else:
@ -501,12 +522,13 @@ class WebSocketReader:
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD)
else WSParserState.READ_PAYLOAD
)
# read payload mask
if self._state == WSParserState.READ_PAYLOAD_MASK:
if buf_length - start_pos >= 4:
self._frame_mask = buf[start_pos:start_pos+4]
self._frame_mask = buf[start_pos : start_pos + 4]
start_pos += 4
self._state = WSParserState.READ_PAYLOAD
else:
@ -523,7 +545,7 @@ class WebSocketReader:
start_pos = buf_length
else:
self._payload_length = 0
payload.extend(buf[start_pos:start_pos+length])
payload.extend(buf[start_pos : start_pos + length])
start_pos = start_pos + length
if self._payload_length == 0:
@ -531,11 +553,9 @@ class WebSocketReader:
assert self._frame_mask is not None
_websocket_mask(self._frame_mask, payload)
frames.append((
self._frame_fin,
self._frame_opcode,
payload,
self._compressed))
frames.append(
(self._frame_fin, self._frame_opcode, payload, self._compressed)
)
self._frame_payload = bytearray()
self._state = WSParserState.READ_HEADER
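
The READ_HEADER bit twiddling above, replayed on a concrete frame: an unmasked server-to-client text frame carrying "Hi":

frame = b"\x81\x02Hi"  # FIN=1, opcode=0x1 (TEXT); mask=0, length=2
first_byte, second_byte = frame[0], frame[1]

fin = (first_byte >> 7) & 1
opcode = first_byte & 0xF
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7F

assert (fin, opcode, has_mask, length) == (1, 0x1, 0, 2)
assert frame[2 : 2 + length] == b"Hi"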
@ -548,11 +568,17 @@ class WebSocketReader:
class WebSocketWriter:
def __init__(self, protocol: BaseProtocol, transport: asyncio.Transport, *,
use_mask: bool=False, limit: int=DEFAULT_LIMIT,
random: Any=random.Random(),
compress: int=0, notakeover: bool=False) -> None:
def __init__(
self,
protocol: BaseProtocol,
transport: asyncio.Transport,
*,
use_mask: bool = False,
limit: int = DEFAULT_LIMIT,
random: Any = random.Random(),
compress: int = 0,
notakeover: bool = False,
) -> None:
self.protocol = protocol
self.transport = transport
self.use_mask = use_mask
@ -564,11 +590,12 @@ class WebSocketWriter:
self._output_size = 0
self._compressobj = None # type: Any # actually compressobj
async def _send_frame(self, message: bytes, opcode: int,
compress: Optional[int]=None) -> None:
async def _send_frame(
self, message: bytes, opcode: int, compress: Optional[int] = None
) -> None:
"""Send a frame over the websocket with message as its payload."""
if self._closing:
ws_logger.warning('websocket connection is closing.')
if self._closing and not (opcode & WSMsgType.CLOSE):
raise ConnectionResetError("Cannot write to closing transport")
rsv = 0
@ -578,15 +605,18 @@ class WebSocketWriter:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not set self._compress if compressing is for this frame
compressobj = zlib.compressobj(wbits=-compress)
compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
else: # self.compress
if not self._compressobj:
self._compressobj = zlib.compressobj(wbits=-self.compress)
self._compressobj = zlib.compressobj(
level=zlib.Z_BEST_SPEED, wbits=-self.compress
)
compressobj = self._compressobj
message = compressobj.compress(message)
message = message + compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH)
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
)
if message.endswith(_WS_DEFLATE_TRAILING):
message = message[:-4]
rsv = rsv | 0x40
@ -606,18 +636,18 @@ class WebSocketWriter:
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
if use_mask:
mask = self.randrange(0, 0xffffffff)
mask = mask.to_bytes(4, 'big')
mask = self.randrange(0, 0xFFFFFFFF)
mask = mask.to_bytes(4, "big")
message = bytearray(message)
_websocket_mask(mask, message)
self.transport.write(header + mask + message)
self._write(header + mask + message)
self._output_size += len(header) + len(mask) + len(message)
else:
if len(message) > MSG_SIZE:
self.transport.write(header)
self.transport.write(message)
self._write(header)
self._write(message)
else:
self.transport.write(header + message)
self._write(header + message)
self._output_size += len(header) + len(message)
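
Client-side masking as applied above is a 4-byte XOR keystream (RFC 6455 section 5.3), so applying the same mask twice round-trips. A pure-Python sketch of what the _websocket_mask helpers do:

def websocket_mask(mask: bytes, data: bytearray) -> None:
    # XOR every payload byte with the mask, repeating it every 4 bytes.
    for i in range(len(data)):
        data[i] ^= mask[i % 4]

payload = bytearray(b"hello")
websocket_mask(b"\x01\x02\x03\x04", payload)  # mask on send
websocket_mask(b"\x01\x02\x03\x04", payload)  # unmask on receive
assert payload == b"hello"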
@ -625,35 +655,44 @@ class WebSocketWriter:
self._output_size = 0
await self.protocol._drain_helper()
async def pong(self, message: bytes=b'') -> None:
def _write(self, data: bytes) -> None:
if self.transport is None or self.transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
self.transport.write(data)
async def pong(self, message: bytes = b"") -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PONG)
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PING)
async def send(self, message: Union[str, bytes],
binary: bool=False,
compress: Optional[int]=None) -> None:
async def send(
self,
message: Union[str, bytes],
binary: bool = False,
compress: Optional[int] = None,
) -> None:
"""Send a frame over the websocket with message as its payload."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
if binary:
await self._send_frame(message, WSMsgType.BINARY, compress)
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
async def close(self, code: int=1000, message: bytes=b'') -> None:
async def close(self, code: int = 1000, message: bytes = b"") -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode('utf-8')
message = message.encode("utf-8")
try:
await self._send_frame(
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE)
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
)
finally:
self._closing = True

View File

@ -5,15 +5,15 @@ import collections
import zlib
from typing import Any, Awaitable, Callable, Optional, Union # noqa
from multidict import CIMultiDict # noqa
from multidict import CIMultiDict
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
__all__ = ('StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11')
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
HttpVersion = collections.namedtuple('HttpVersion', ['major', 'minor'])
HttpVersion = collections.namedtuple("HttpVersion", ["major", "minor"])
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
@ -22,11 +22,12 @@ _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
class StreamWriter(AbstractStreamWriter):
def __init__(self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None) -> None:
def __init__(
self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None,
) -> None:
self._protocol = protocol
self._transport = protocol.transport
@ -53,9 +54,8 @@ class StreamWriter(AbstractStreamWriter):
def enable_chunking(self) -> None:
self.chunked = True
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
def enable_compression(self, encoding: str = "deflate") -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode)
def _write(self, chunk: bytes) -> None:
@ -64,11 +64,12 @@ class StreamWriter(AbstractStreamWriter):
self.output_size += size
if self._transport is None or self._transport.is_closing():
raise ConnectionResetError('Cannot write to closing transport')
raise ConnectionResetError("Cannot write to closing transport")
self._transport.write(chunk)
async def write(self, chunk: bytes,
*, drain: bool=True, LIMIT: int=0x10000) -> None:
async def write(
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
) -> None:
"""Writes chunk of data to a stream.
write_eof() indicates end of stream.
@ -78,6 +79,11 @@ class StreamWriter(AbstractStreamWriter):
if self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if isinstance(chunk, memoryview):
if chunk.nbytes != len(chunk):
# just reshape it
chunk = chunk.cast("c")
if self._compress is not None:
chunk = self._compress.compress(chunk)
if not chunk:
@ -88,15 +94,15 @@ class StreamWriter(AbstractStreamWriter):
if self.length >= chunk_len:
self.length = self.length - chunk_len
else:
chunk = chunk[:self.length]
chunk = chunk[: self.length]
self.length = 0
if not chunk:
return
if chunk:
if self.chunked:
chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len_pre + chunk + b'\r\n'
chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len_pre + chunk + b"\r\n"
self._write(chunk)
@ -104,14 +110,15 @@ class StreamWriter(AbstractStreamWriter):
self.buffer_size = 0
await self.drain()
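
When self.chunked is set, every write above is framed as an HTTP/1.1 chunk: hex length, CRLF, the bytes, CRLF. A small sketch of that framing:

def frame_chunk(chunk: bytes) -> bytes:
    return ("%x\r\n" % len(chunk)).encode("ascii") + chunk + b"\r\n"

assert frame_chunk(b"Wikipedia") == b"9\r\nWikipedia\r\n"
# write_eof() later terminates the stream with b"0\r\n\r\n".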
async def write_headers(self, status_line: str,
headers: 'CIMultiDict[str]') -> None:
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write request/response status and headers."""
# status + headers
buf = _serialize_headers(status_line, headers)
self._write(buf)
async def write_eof(self, chunk: bytes=b'') -> None:
async def write_eof(self, chunk: bytes = b"") -> None:
if self._eof:
return
@ -124,15 +131,15 @@ class StreamWriter(AbstractStreamWriter):
chunk = chunk + self._compress.flush()
if chunk and self.chunked:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
if self.chunked:
if chunk:
chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
chunk = b'0\r\n\r\n'
chunk = b"0\r\n\r\n"
if chunk:
self._write(chunk)
@ -154,17 +161,20 @@ class StreamWriter(AbstractStreamWriter):
await self._protocol._drain_helper()
def _py_serialize_headers(status_line: str,
headers: 'CIMultiDict[str]') -> bytes:
line = status_line + '\r\n' + ''.join(
[k + ': ' + v + '\r\n' for k, v in headers.items()])
return line.encode('utf-8') + b'\r\n'
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
line = (
status_line
+ "\r\n"
+ "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
)
return line.encode("utf-8") + b"\r\n"
_serialize_headers = _py_serialize_headers
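
A usage sketch of the pure-Python serializer defined above (when the C extension imports, _serialize_headers is rebound to a version expected to produce the same bytes):

from multidict import CIMultiDict

headers = CIMultiDict(Host="example.com")
headers.add("Set-Cookie", "a=1")

buf = _py_serialize_headers("HTTP/1.1 200 OK", headers)
assert buf == b"HTTP/1.1 200 OK\r\nHost: example.com\r\nSet-Cookie: a=1\r\n\r\n"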
try:
import aiohttp._http_writer as _http_writer # type: ignore
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
_serialize_headers = _c_serialize_headers

View File

@ -5,7 +5,7 @@ from typing import Any, Optional
try:
from typing import Deque
except ImportError:
from typing_extensions import Deque # noqa
from typing_extensions import Deque
class EventResultOrError:
@ -15,13 +15,14 @@ class EventResultOrError:
thanks to @vorpalsmith for the simple design.
"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._exc = None # type: Optional[BaseException]
self._event = asyncio.Event(loop=loop)
self._event = asyncio.Event()
self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
def set(self, exc: Optional[BaseException]=None) -> None:
def set(self, exc: Optional[BaseException] = None) -> None:
self._exc = exc
self._event.set()
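
A minimal usage sketch of this internal primitive (not public API; module path assumed as aiohttp.locks): tasks block in wait() until set() fires, and set(exc) makes every waiter raise instead.

import asyncio

from aiohttp.locks import EventResultOrError

async def main() -> None:
    ev = EventResultOrError(asyncio.get_running_loop())
    waiter = asyncio.create_task(ev.wait())
    ev.set()  # ev.set(RuntimeError("boom")) would make wait() raise
    await waiter

asyncio.run(main())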

View File

@ -1,8 +1,8 @@
import logging
access_logger = logging.getLogger('aiohttp.access')
client_logger = logging.getLogger('aiohttp.client')
internal_logger = logging.getLogger('aiohttp.internal')
server_logger = logging.getLogger('aiohttp.server')
web_logger = logging.getLogger('aiohttp.web')
ws_logger = logging.getLogger('aiohttp.websocket')
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")

View File

@ -7,9 +7,10 @@ import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import ( # noqa
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
Dict,
Iterator,
List,
@ -22,7 +23,7 @@ from typing import ( # noqa
)
from urllib.parse import parse_qsl, unquote, urlencode
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping # noqa
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
from .hdrs import (
CONTENT_DISPOSITION,
@ -44,13 +45,19 @@ from .payload import (
)
from .streams import StreamReader
__all__ = ('MultipartReader', 'MultipartWriter', 'BodyPartReader',
'BadContentDispositionHeader', 'BadContentDispositionParam',
'parse_content_disposition', 'content_disposition_filename')
__all__ = (
"MultipartReader",
"MultipartWriter",
"BodyPartReader",
"BadContentDispositionHeader",
"BadContentDispositionParam",
"parse_content_disposition",
"content_disposition_filename",
)
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import ClientResponse # noqa
from .client_reqrep import ClientResponse
class BadContentDispositionHeader(RuntimeWarning):
@ -61,9 +68,9 @@ class BadContentDispositionParam(RuntimeWarning):
pass
def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
Dict[str, str]]:
def parse_content_disposition(
header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
def is_token(string: str) -> bool:
return bool(string) and TOKEN >= set(string)
@ -74,23 +81,22 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
return is_token(string) and string.count("'") == 2
def is_extended_param(string: str) -> bool:
return string.endswith('*')
return string.endswith("*")
def is_continuous_param(string: str) -> bool:
pos = string.find('*') + 1
pos = string.find("*") + 1
if not pos:
return False
substring = string[pos:-1] if string.endswith('*') else string[pos:]
substring = string[pos:-1] if string.endswith("*") else string[pos:]
return substring.isdigit()
def unescape(text: str, *,
chars: str=''.join(map(re.escape, CHAR))) -> str:
return re.sub('\\\\([{}])'.format(chars), '\\1', text)
def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
return re.sub(f"\\\\([{chars}])", "\\1", text)
if not header:
return None, {}
disptype, *parts = header.split(';')
disptype, *parts = header.split(";")
if not is_token(disptype):
warnings.warn(BadContentDispositionHeader(header))
return None, {}
@ -99,11 +105,11 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
while parts:
item = parts.pop(0)
if '=' not in item:
if "=" not in item:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
key, value = item.split('=', 1)
key, value = item.split("=", 1)
key = key.lower().strip()
value = value.lstrip()
@ -125,13 +131,13 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
elif is_extended_param(key):
if is_rfc5987(value):
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
encoding = encoding or "utf-8"
else:
warnings.warn(BadContentDispositionParam(item))
continue
try:
value = unquote(value, encoding, 'strict')
value = unquote(value, encoding, "strict")
except UnicodeDecodeError: # pragma: nocover
warnings.warn(BadContentDispositionParam(item))
continue
@ -140,16 +146,16 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
failed = True
if is_quoted(value):
failed = False
value = unescape(value[1:-1].lstrip('\\/'))
value = unescape(value[1:-1].lstrip("\\/"))
elif is_token(value):
failed = False
elif parts:
# maybe just ; in filename, in any case this is just
# one case fix, for proper fix we need to redesign parser
_value = '%s;%s' % (value, parts[0])
_value = "{};{}".format(value, parts[0])
if is_quoted(_value):
parts.pop(0)
value = unescape(_value[1:-1].lstrip('\\/'))
value = unescape(_value[1:-1].lstrip("\\/"))
failed = False
if failed:
@ -161,9 +167,10 @@ def parse_content_disposition(header: Optional[str]) -> Tuple[Optional[str],
return disptype.lower(), params
def content_disposition_filename(params: Mapping[str, str],
name: str='filename') -> Optional[str]:
name_suf = '%s*' % name
def content_disposition_filename(
params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
name_suf = "%s*" % name
if not params:
return None
elif name_suf in params:
@ -172,12 +179,12 @@ def content_disposition_filename(params: Mapping[str, str],
return params[name]
else:
parts = []
fnparams = sorted((key, value)
for key, value in params.items()
if key.startswith(name_suf))
fnparams = sorted(
(key, value) for key, value in params.items() if key.startswith(name_suf)
)
for num, (key, value) in enumerate(fnparams):
_, tail = key.split('*', 1)
if tail.endswith('*'):
_, tail = key.split("*", 1)
if tail.endswith("*"):
tail = tail[:-1]
if tail == str(num):
parts.append(value)
@ -185,11 +192,11 @@ def content_disposition_filename(params: Mapping[str, str],
break
if not parts:
return None
value = ''.join(parts)
value = "".join(parts)
if "'" in value:
encoding, _, value = value.split("'", 2)
encoding = encoding or 'utf-8'
return unquote(value, encoding, 'strict')
encoding = encoding or "utf-8"
return unquote(value, encoding, "strict")
return value
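
A usage sketch of the two helpers above, both exported in this module's __all__:

from aiohttp.multipart import (
    content_disposition_filename,
    parse_content_disposition,
)

disptype, params = parse_content_disposition(
    'attachment; name="field"; filename="report.pdf"'
)
assert disptype == "attachment"
assert content_disposition_filename(params) == "report.pdf"
assert content_disposition_filename(params, "name") == "field"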
@ -202,21 +209,21 @@ class MultipartResponseWrapper:
def __init__(
self,
resp: 'ClientResponse',
stream: 'MultipartReader',
resp: "ClientResponse",
stream: "MultipartReader",
) -> None:
self.resp = resp
self.stream = stream
def __aiter__(self) -> 'MultipartResponseWrapper':
def __aiter__(self) -> "MultipartResponseWrapper":
return self
async def __anext__(
self,
) -> Union['MultipartReader', 'BodyPartReader']:
) -> Union["MultipartReader", "BodyPartReader"]:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
def at_eof(self) -> bool:
@ -225,7 +232,7 @@ class MultipartResponseWrapper:
async def next(
self,
) -> Optional[Union['MultipartReader', 'BodyPartReader']]:
) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
"""Emits next multipart reader object."""
item = await self.stream.next()
if self.stream.at_eof():
@ -243,9 +250,9 @@ class BodyPartReader:
chunk_size = 8192
def __init__(self, boundary: bytes,
headers: 'CIMultiDictProxy[str]',
content: StreamReader) -> None:
def __init__(
self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
) -> None:
self.headers = headers
self._boundary = boundary
self._content = content
@ -259,13 +266,13 @@ class BodyPartReader:
self._content_eof = 0
self._cache = {} # type: Dict[str, Any]
def __aiter__(self) -> 'BodyPartReader':
return self
def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore
async def __anext__(self) -> bytes:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
async def next(self) -> Optional[bytes]:
@ -274,7 +281,7 @@ class BodyPartReader:
return None
return item
async def read(self, *, decode: bool=False) -> bytes:
async def read(self, *, decode: bool = False) -> bytes:
"""Reads body part data.
decode: Decodes data following by encoding
@ -282,21 +289,21 @@ class BodyPartReader:
data remains untouched
"""
if self._at_eof:
return b''
return b""
data = bytearray()
while not self._at_eof:
data.extend((await self.read_chunk(self.chunk_size)))
data.extend(await self.read_chunk(self.chunk_size))
if decode:
return self.decode(data)
return data
async def read_chunk(self, size: int=chunk_size) -> bytes:
async def read_chunk(self, size: int = chunk_size) -> bytes:
"""Reads body part content chunk of the specified size.
size: chunk size
"""
if self._at_eof:
return b''
return b""
if self._length:
chunk = await self._read_chunk_from_length(size)
else:
@ -307,15 +314,15 @@ class BodyPartReader:
self._at_eof = True
if self._at_eof:
clrf = await self._content.readline()
assert b'\r\n' == clrf, \
'reader did not read all the data or it is malformed'
assert (
b"\r\n" == clrf
), "reader did not read all the data or it is malformed"
return chunk
async def _read_chunk_from_length(self, size: int) -> bytes:
# Reads body part content chunk of the specified size.
# The body part must have a Content-Length header with a proper value.
assert self._length is not None, \
'Content-Length required for chunked read'
assert self._length is not None, "Content-Length required for chunked read"
chunk_size = min(size, self._length - self._read_bytes)
chunk = await self._content.read(chunk_size)
return chunk
@ -323,8 +330,9 @@ class BodyPartReader:
async def _read_chunk_from_stream(self, size: int) -> bytes:
# Reads a content chunk of a body part with unknown length.
# The Content-Length header for the body part is not required.
assert size >= len(self._boundary) + 2, \
'Chunk size must be greater or equal than boundary length + 2'
assert (
size >= len(self._boundary) + 2
), "Chunk size must be greater or equal than boundary length + 2"
first_chunk = self._prev_chunk is None
if first_chunk:
self._prev_chunk = await self._content.read(size)
@ -334,7 +342,7 @@ class BodyPartReader:
assert self._content_eof < 3, "Reading after EOF"
assert self._prev_chunk is not None
window = self._prev_chunk + chunk
sub = b'\r\n' + self._boundary
sub = b"\r\n" + self._boundary
if first_chunk:
idx = window.find(sub)
else:
@ -342,12 +350,11 @@ class BodyPartReader:
if idx >= 0:
# pushing boundary back to content
with warnings.catch_warnings():
warnings.filterwarnings("ignore",
category=DeprecationWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning)
self._content.unread_data(window[idx:])
if size > idx:
self._prev_chunk = self._prev_chunk[:idx]
chunk = window[len(self._prev_chunk):idx]
chunk = window[len(self._prev_chunk) : idx]
if not chunk:
self._at_eof = True
result = self._prev_chunk
@ -357,7 +364,7 @@ class BodyPartReader:
async def readline(self) -> bytes:
"""Reads body part by line by line."""
if self._at_eof:
return b''
return b""
if self._unread:
line = self._unread.popleft()
@ -367,14 +374,14 @@ class BodyPartReader:
if line.startswith(self._boundary):
# the very last boundary may not come with \r\n,
# so apply a single rule to all boundaries
sline = line.rstrip(b'\r\n')
sline = line.rstrip(b"\r\n")
boundary = self._boundary
last_boundary = self._boundary + b'--'
last_boundary = self._boundary + b"--"
# ensure that we read exactly the boundary, not something alike
if sline == boundary or sline == last_boundary:
self._at_eof = True
self._unread.append(line)
return b''
return b""
else:
next_line = await self._content.readline()
if next_line.startswith(self._boundary):
@ -390,26 +397,23 @@ class BodyPartReader:
while not self._at_eof:
await self.read_chunk(self.chunk_size)
async def text(self, *, encoding: Optional[str]=None) -> str:
async def text(self, *, encoding: Optional[str] = None) -> str:
"""Like read(), but assumes that body part contains text data."""
data = await self.read(decode=True)
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
encoding = encoding or self.get_charset(default='utf-8')
encoding = encoding or self.get_charset(default="utf-8")
return data.decode(encoding)
async def json(self,
*,
encoding: Optional[str]=None) -> Optional[Dict[str, Any]]:
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
"""Like read(), but assumes that body parts contains JSON data."""
data = await self.read(decode=True)
if not data:
return None
encoding = encoding or self.get_charset(default='utf-8')
encoding = encoding or self.get_charset(default="utf-8")
return json.loads(data.decode(encoding))
async def form(self, *,
encoding: Optional[str]=None) -> List[Tuple[str, str]]:
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contains form
urlencoded data.
"""
@ -419,10 +423,12 @@ class BodyPartReader:
if encoding is not None:
real_encoding = encoding
else:
real_encoding = self.get_charset(default='utf-8')
return parse_qsl(data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding)
real_encoding = self.get_charset(default="utf-8")
return parse_qsl(
data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding,
)
def at_eof(self) -> bool:
"""Returns True if the boundary was reached or False otherwise."""
@ -439,35 +445,36 @@ class BodyPartReader:
return data
def _decode_content(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_ENCODING, '').lower()
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
if encoding == 'deflate':
if encoding == "deflate":
return zlib.decompress(data, -zlib.MAX_WBITS)
elif encoding == 'gzip':
elif encoding == "gzip":
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
elif encoding == 'identity':
elif encoding == "identity":
return data
else:
raise RuntimeError('unknown content encoding: {}'.format(encoding))
raise RuntimeError(f"unknown content encoding: {encoding}")
def _decode_content_transfer(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, '').lower()
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
if encoding == 'base64':
if encoding == "base64":
return base64.b64decode(data)
elif encoding == 'quoted-printable':
elif encoding == "quoted-printable":
return binascii.a2b_qp(data)
elif encoding in ('binary', '8bit', '7bit'):
elif encoding in ("binary", "8bit", "7bit"):
return data
else:
raise RuntimeError('unknown content transfer encoding: {}'
''.format(encoding))
raise RuntimeError(
    "unknown content transfer encoding: {}".format(encoding)
)
def get_charset(self, default: str) -> str:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, '')
ctype = self.headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
return mimetype.parameters.get('charset', default)
return mimetype.parameters.get("charset", default)
@reify
def name(self) -> Optional[str]:
@ -475,42 +482,38 @@ class BodyPartReader:
if missed or header is malformed.
"""
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'name')
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "name")
@reify
def filename(self) -> Optional[str]:
"""Returns filename specified in Content-Disposition header or None
if missed or header is malformed.
"""
_, params = parse_content_disposition(
self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, 'filename')
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "filename")
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
def __init__(self, value: BodyPartReader,
*args: Any, **kwargs: Any) -> None:
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
params = {} # type: Dict[str, str]
if value.name is not None:
params['name'] = value.name
params["name"] = value.name
if value.filename is not None:
params['filename'] = value.filename
params["filename"] = value.filename
if params:
self.set_content_disposition('attachment', True, **params)
self.set_content_disposition("attachment", True, **params)
async def write(self, writer: Any) -> None:
field = self._value
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
while chunk:
await writer.write(field.decode(chunk))
chunk = await field.read_chunk(size=2**16)
chunk = await field.read_chunk(size=2 ** 16)
class MultipartReader:
@ -524,38 +527,42 @@ class MultipartReader:
#: Body part reader class for non multipart/* content types.
part_reader_cls = BodyPartReader
def __init__(self, headers: Mapping[str, str],
content: StreamReader) -> None:
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
self.headers = headers
self._boundary = ('--' + self._get_boundary()).encode()
self._boundary = ("--" + self._get_boundary()).encode()
self._content = content
self._last_part = None # type: Optional[Union['MultipartReader', BodyPartReader]] # noqa
self._last_part = (
None
) # type: Optional[Union['MultipartReader', BodyPartReader]]
self._at_eof = False
self._at_bof = True
self._unread = [] # type: List[bytes]
def __aiter__(self) -> 'MultipartReader':
return self
def __aiter__(
self,
) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore
async def __anext__(
self,
) -> Union['MultipartReader', BodyPartReader]:
) -> Optional[Union["MultipartReader", BodyPartReader]]:
part = await self.next()
if part is None:
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
return part
@classmethod
def from_response(
cls,
response: 'ClientResponse',
response: "ClientResponse",
) -> MultipartResponseWrapper:
"""Constructs reader instance from HTTP response.
:param response: :class:`~aiohttp.client.ClientResponse` instance
"""
obj = cls.response_wrapper_cls(response, cls(response.headers,
response.content))
obj = cls.response_wrapper_cls(
response, cls(response.headers, response.content)
)
return obj
def at_eof(self) -> bool:
@ -566,7 +573,7 @@ class MultipartReader:
async def next(
self,
) -> Optional[Union['MultipartReader', BodyPartReader]]:
) -> Optional[Union["MultipartReader", BodyPartReader]]:
"""Emits the next multipart body part."""
# So, if we're at BOF, we need to skip till the boundary.
if self._at_eof:
@ -592,24 +599,24 @@ class MultipartReader:
async def fetch_next_part(
self,
) -> Union['MultipartReader', BodyPartReader]:
) -> Union["MultipartReader", BodyPartReader]:
"""Returns the next body part reader."""
headers = await self._read_headers()
return self._get_part_reader(headers)
def _get_part_reader(
self,
headers: 'CIMultiDictProxy[str]',
) -> Union['MultipartReader', BodyPartReader]:
headers: "CIMultiDictProxy[str]",
) -> Union["MultipartReader", BodyPartReader]:
"""Dispatches the response by the `Content-Type` header, returning
a suitable reader instance.
:param dict headers: Response headers
"""
ctype = headers.get(CONTENT_TYPE, '')
ctype = headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
if mimetype.type == 'multipart':
if mimetype.type == "multipart":
if self.multipart_reader_cls is None:
return type(self)(headers, self._content)
return self.multipart_reader_cls(headers, self._content)
@ -619,18 +626,16 @@ class MultipartReader:
def _get_boundary(self) -> str:
mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
assert mimetype.type == 'multipart', (
'multipart/* content type expected'
)
assert mimetype.type == "multipart", "multipart/* content type expected"
if 'boundary' not in mimetype.parameters:
raise ValueError('boundary missing for Content-Type: %s'
% self.headers[CONTENT_TYPE])
if "boundary" not in mimetype.parameters:
raise ValueError(
"boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
)
boundary = mimetype.parameters['boundary']
boundary = mimetype.parameters["boundary"]
if len(boundary) > 70:
raise ValueError('boundary %r is too long (70 chars max)'
% boundary)
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
return boundary
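For context on the boundary handling above, here is a minimal sketch of how a boundary is pulled out of a Content-Type header with the vendored aiohttp helper (the header value and boundary token are made up):

from aiohttp.helpers import parse_mimetype

# Hypothetical header value; _get_boundary() performs the same steps.
mt = parse_mimetype("multipart/form-data; boundary=9f1a3b")
assert mt.type == "multipart"
boundary = mt.parameters["boundary"]  # "9f1a3b"
assert len(boundary) <= 70            # RFC 2046 limit enforced above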
@ -642,13 +647,14 @@ class MultipartReader:
async def _read_until_first_boundary(self) -> None:
while True:
chunk = await self._readline()
if chunk == b'':
raise ValueError("Could not find starting boundary %r"
% (self._boundary))
if chunk == b"":
raise ValueError(
"Could not find starting boundary %r" % (self._boundary)
)
chunk = chunk.rstrip()
if chunk == self._boundary:
return
elif chunk == self._boundary + b'--':
elif chunk == self._boundary + b"--":
self._at_eof = True
return
@ -656,7 +662,7 @@ class MultipartReader:
chunk = (await self._readline()).rstrip()
if chunk == self._boundary:
pass
elif chunk == self._boundary + b'--':
elif chunk == self._boundary + b"--":
self._at_eof = True
epilogue = await self._readline()
next_line = await self._readline()
@ -665,7 +671,7 @@ class MultipartReader:
# parent multipart boundary; if the parent boundary is found then
# it should be marked as unread and handed to the parent for
# processing
if next_line[:2] == b'--':
if next_line[:2] == b"--":
self._unread.append(next_line)
# otherwise the request is likely missing an epilogue and both
# lines should be passed to the parent for processing
@ -673,11 +679,10 @@ class MultipartReader:
else:
self._unread.extend([next_line, epilogue])
else:
raise ValueError('Invalid boundary %r, expected %r'
% (chunk, self._boundary))
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
async def _read_headers(self) -> 'CIMultiDictProxy[str]':
lines = [b'']
async def _read_headers(self) -> "CIMultiDictProxy[str]":
lines = [b""]
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
@ -703,8 +708,7 @@ _Part = Tuple[Payload, str, str]
class MultipartWriter(Payload):
"""Multipart body writer."""
def __init__(self, subtype: str='mixed',
boundary: Optional[str]=None) -> None:
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
boundary = boundary if boundary is not None else uuid.uuid4().hex
# The underlying Payload API demands a str (utf-8), not bytes,
# so we need to ensure we don't lose anything during conversion.
@ -712,24 +716,24 @@ class MultipartWriter(Payload):
# In both situations.
try:
self._boundary = boundary.encode('ascii')
self._boundary = boundary.encode("ascii")
except UnicodeEncodeError:
raise ValueError('boundary should contain ASCII only chars') \
from None
ctype = ('multipart/{}; boundary={}'
.format(subtype, self._boundary_value))
raise ValueError("boundary should contain ASCII only chars") from None
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
super().__init__(None, content_type=ctype)
self._parts = [] # type: List[_Part] # noqa
self._parts = [] # type: List[_Part]
def __enter__(self) -> 'MultipartWriter':
def __enter__(self) -> "MultipartWriter":
return self
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
def __iter__(self) -> Iterator[_Part]:
@ -765,26 +769,22 @@ class MultipartWriter(Payload):
# VCHAR = %x21-7E
value = self._boundary
if re.match(self._valid_tchar_regex, value):
return value.decode('ascii') # cannot fail
return value.decode("ascii") # cannot fail
if re.search(self._invalid_qdtext_char_regex, value):
raise ValueError("boundary value contains invalid characters")
# escape %x5C and %x22
quoted_value_content = value.replace(b'\\', b'\\\\')
quoted_value_content = value.replace(b"\\", b"\\\\")
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
return '"' + quoted_value_content.decode('ascii') + '"'
return '"' + quoted_value_content.decode("ascii") + '"'
@property
def boundary(self) -> str:
return self._boundary.decode('ascii')
return self._boundary.decode("ascii")
def append(
self,
obj: Any,
headers: Optional[MultiMapping[str]]=None
) -> Payload:
def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
if headers is None:
headers = CIMultiDict()
@ -795,7 +795,7 @@ class MultipartWriter(Payload):
try:
payload = get_payload(obj, headers=headers)
except LookupError:
raise TypeError('Cannot create payload from %r' % obj)
raise TypeError("Cannot create payload from %r" % obj)
else:
return self.append_payload(payload)
@ -804,22 +804,23 @@ class MultipartWriter(Payload):
# compression
encoding = payload.headers.get(
CONTENT_ENCODING,
'',
"",
).lower() # type: Optional[str]
if encoding and encoding not in ('deflate', 'gzip', 'identity'):
raise RuntimeError('unknown content encoding: {}'.format(encoding))
if encoding == 'identity':
if encoding and encoding not in ("deflate", "gzip", "identity"):
raise RuntimeError(f"unknown content encoding: {encoding}")
if encoding == "identity":
encoding = None
# te encoding
te_encoding = payload.headers.get(
CONTENT_TRANSFER_ENCODING,
'',
"",
).lower() # type: Optional[str]
if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
raise RuntimeError('unknown content transfer encoding: {}'
''.format(te_encoding))
if te_encoding == 'binary':
if te_encoding not in ("", "base64", "quoted-printable", "binary"):
raise RuntimeError(
"unknown content transfer encoding: {}" "".format(te_encoding)
)
if te_encoding == "binary":
te_encoding = None
# size
@ -831,9 +832,7 @@ class MultipartWriter(Payload):
return payload
def append_json(
self,
obj: Any,
headers: Optional[MultiMapping[str]]=None
self, obj: Any, headers: Optional[MultiMapping[str]] = None
) -> Payload:
"""Helper to append JSON part."""
if headers is None:
@ -842,10 +841,9 @@ class MultipartWriter(Payload):
return self.append_payload(JsonPayload(obj, headers=headers))
def append_form(
self,
obj: Union[Sequence[Tuple[str, str]],
Mapping[str, str]],
headers: Optional[MultiMapping[str]]=None
self,
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
headers: Optional[MultiMapping[str]] = None,
) -> Payload:
"""Helper to append form urlencoded part."""
assert isinstance(obj, (Sequence, Mapping))
@ -858,8 +856,10 @@ class MultipartWriter(Payload):
data = urlencode(obj, doseq=True)
return self.append_payload(
StringPayload(data, headers=headers,
content_type='application/x-www-form-urlencoded'))
StringPayload(
data, headers=headers, content_type="application/x-www-form-urlencoded"
)
)
@property
def size(self) -> Optional[int]:
@ -870,19 +870,21 @@ class MultipartWriter(Payload):
return None
total += int(
2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n'
part.size + len(part._binary_headers) +
2 # b'\r\n'
2
+ len(self._boundary)
+ 2 # b'--'+self._boundary+b'\r\n'
+ part.size
+ len(part._binary_headers)
+ 2 # b'\r\n'
)
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
return total
async def write(self, writer: Any,
close_boundary: bool=True) -> None:
async def write(self, writer: Any, close_boundary: bool = True) -> None:
"""Write body."""
for part, encoding, te_encoding in self._parts:
await writer.write(b'--' + self._boundary + b'\r\n')
await writer.write(b"--" + self._boundary + b"\r\n")
await writer.write(part._binary_headers)
if encoding or te_encoding:
@ -896,14 +898,13 @@ class MultipartWriter(Payload):
else:
await part.write(writer)
await writer.write(b'\r\n')
await writer.write(b"\r\n")
if close_boundary:
await writer.write(b'--' + self._boundary + b'--\r\n')
await writer.write(b"--" + self._boundary + b"--\r\n")
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
self._writer = writer
self._encoding = None # type: Optional[str]
@ -911,15 +912,14 @@ class MultipartPayloadWriter:
self._encoding_buffer = None # type: Optional[bytearray]
def enable_encoding(self, encoding: str) -> None:
if encoding == 'base64':
if encoding == "base64":
self._encoding = encoding
self._encoding_buffer = bytearray()
elif encoding == 'quoted-printable':
self._encoding = 'quoted-printable'
elif encoding == "quoted-printable":
self._encoding = "quoted-printable"
def enable_compression(self, encoding: str='deflate') -> None:
zlib_mode = (16 + zlib.MAX_WBITS
if encoding == 'gzip' else -zlib.MAX_WBITS)
def enable_compression(self, encoding: str = "deflate") -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode)
async def write_eof(self) -> None:
@ -929,10 +929,9 @@ class MultipartPayloadWriter:
self._compress = None
await self.write(chunk)
if self._encoding == 'base64':
if self._encoding == "base64":
if self._encoding_buffer:
await self._writer.write(base64.b64encode(
self._encoding_buffer))
await self._writer.write(base64.b64encode(self._encoding_buffer))
async def write(self, chunk: bytes) -> None:
if self._compress is not None:
@ -941,19 +940,18 @@ class MultipartPayloadWriter:
if not chunk:
return
if self._encoding == 'base64':
if self._encoding == "base64":
buf = self._encoding_buffer
assert buf is not None
buf.extend(chunk)
if buf:
div, mod = divmod(len(buf), 3)
enc_chunk, self._encoding_buffer = (
buf[:div * 3], buf[div * 3:])
enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
if enc_chunk:
b64chunk = base64.b64encode(enc_chunk)
await self._writer.write(b64chunk)
elif self._encoding == 'quoted-printable':
elif self._encoding == "quoted-printable":
await self._writer.write(binascii.b2a_qp(chunk))
else:
await self._writer.write(chunk)
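The divmod() arithmetic in write() above exists because base64 operates on 3-byte groups; any remainder stays in _encoding_buffer until write_eof() flushes it. A worked illustration in plain Python:

import base64

buf = bytearray(b"hello world")       # 11 pending bytes
div, mod = divmod(len(buf), 3)        # div=3, mod=2
enc_chunk, rest = buf[: div * 3], buf[div * 3 :]
print(base64.b64encode(enc_chunk))    # b'aGVsbG8gd29y' (first 9 bytes encoded now)
print(bytes(rest))                    # b'ld' -- held back until write_eof()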

View File

@ -33,20 +33,30 @@ from .helpers import (
parse_mimetype,
sentinel,
)
from .streams import DEFAULT_LIMIT, StreamReader
from .streams import StreamReader
from .typedefs import JSONEncoder, _CIMultiDict
__all__ = ('PAYLOAD_REGISTRY', 'get_payload', 'payload_type', 'Payload',
'BytesPayload', 'StringPayload',
'IOBasePayload', 'BytesIOPayload', 'BufferedReaderPayload',
'TextIOPayload', 'StringIOPayload', 'JsonPayload',
'AsyncIterablePayload')
__all__ = (
"PAYLOAD_REGISTRY",
"get_payload",
"payload_type",
"Payload",
"BytesPayload",
"StringPayload",
"IOBasePayload",
"BytesIOPayload",
"BufferedReaderPayload",
"TextIOPayload",
"StringIOPayload",
"JsonPayload",
"AsyncIterablePayload",
)
TOO_LARGE_BYTES_BODY = 2 ** 20 # 1 MB
if TYPE_CHECKING: # pragma: no cover
from typing import List # noqa
from typing import List
class LookupError(Exception):
@ -54,29 +64,27 @@ class LookupError(Exception):
class Order(str, enum.Enum):
normal = 'normal'
try_first = 'try_first'
try_last = 'try_last'
normal = "normal"
try_first = "try_first"
try_last = "try_last"
def get_payload(data: Any, *args: Any, **kwargs: Any) -> 'Payload':
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
def register_payload(factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
def register_payload(
factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
PAYLOAD_REGISTRY.register(factory, type, order=order)
class payload_type:
def __init__(self, type: Any, *, order: Order=Order.normal) -> None:
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
self.type = type
self.order = order
def __call__(self, factory: Type['Payload']) -> Type['Payload']:
def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
register_payload(factory, self.type, order=self.order)
return factory
@ -92,11 +100,9 @@ class PayloadRegistry:
self._normal = [] # type: List[Tuple[Type[Payload], Any]]
self._last = [] # type: List[Tuple[Type[Payload], Any]]
def get(self,
data: Any,
*args: Any,
_CHAIN: Any=chain,
**kwargs: Any) -> 'Payload':
def get(
self, data: Any, *args: Any, _CHAIN: Any = chain, **kwargs: Any
) -> "Payload":
if isinstance(data, Payload):
return data
for factory, type in _CHAIN(self._first, self._normal, self._last):
@ -105,11 +111,9 @@ class PayloadRegistry:
raise LookupError()
def register(self,
factory: Type['Payload'],
type: Any,
*,
order: Order=Order.normal) -> None:
def register(
self, factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
if order is Order.try_first:
self._first.append((factory, type))
elif order is Order.normal:
@ -117,27 +121,25 @@ class PayloadRegistry:
elif order is Order.try_last:
self._last.append((factory, type))
else:
raise ValueError("Unsupported order {!r}".format(order))
raise ValueError(f"Unsupported order {order!r}")
class Payload(ABC):
_default_content_type = 'application/octet-stream' # type: str
_default_content_type = "application/octet-stream" # type: str
_size = None # type: Optional[int]
def __init__(self,
value: Any,
headers: Optional[
Union[
_CIMultiDict,
Dict[str, str],
Iterable[Tuple[str, str]]
]
] = None,
content_type: Optional[str]=sentinel,
filename: Optional[str]=None,
encoding: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: Any,
headers: Optional[
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
] = None,
content_type: Optional[str] = sentinel,
filename: Optional[str] = None,
encoding: Optional[str] = None,
**kwargs: Any,
) -> None:
self._encoding = encoding
self._filename = filename
self._headers = CIMultiDict() # type: _CIMultiDict
@ -170,9 +172,12 @@ class Payload(ABC):
@property
def _binary_headers(self) -> bytes:
return ''.join(
[k + ': ' + v + '\r\n' for k, v in self.headers.items()]
).encode('utf-8') + b'\r\n'
return (
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
"utf-8"
)
+ b"\r\n"
)
@property
def encoding(self) -> Optional[str]:
@ -184,13 +189,13 @@ class Payload(ABC):
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
def set_content_disposition(self,
disptype: str,
quote_fields: bool=True,
**params: Any) -> None:
def set_content_disposition(
self, disptype: str, quote_fields: bool = True, **params: Any
) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
disptype, quote_fields=quote_fields, **params)
disptype, quote_fields=quote_fields, **params
)
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
@ -201,55 +206,59 @@ class Payload(ABC):
class BytesPayload(Payload):
def __init__(self,
value: ByteString,
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, (bytes, bytearray, memoryview)):
raise TypeError("value argument must be byte-ish, not {!r}"
.format(type(value)))
raise TypeError(
"value argument must be byte-ish, not {!r}".format(type(value))
)
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
self._size = len(value)
if isinstance(value, memoryview):
self._size = value.nbytes
else:
self._size = len(value)
if self._size > TOO_LARGE_BYTES_BODY:
if PY_36:
kwargs = {'source': self}
kwargs = {"source": self}
else:
kwargs = {}
warnings.warn("Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead", ResourceWarning,
**kwargs)
warnings.warn(
"Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead",
ResourceWarning,
**kwargs,
)
async def write(self, writer: AbstractStreamWriter) -> None:
await writer.write(self._value)
class StringPayload(BytesPayload):
def __init__(self,
value: Text,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: Text,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
real_encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
real_encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
real_encoding = mimetype.parameters.get('charset', 'utf-8')
real_encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
content_type = "text/plain; charset=%s" % encoding
real_encoding = encoding
super().__init__(
@ -262,66 +271,54 @@ class StringPayload(BytesPayload):
class StringIOPayload(StringPayload):
def __init__(self,
value: IO[str],
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
super().__init__(value.read(), *args, **kwargs)
class IOBasePayload(Payload):
def __init__(self,
value: IO[Any],
disposition: str='attachment',
*args: Any,
**kwargs: Any) -> None:
if 'filename' not in kwargs:
kwargs['filename'] = guess_filename(value)
def __init__(
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
) -> None:
if "filename" not in kwargs:
kwargs["filename"] = guess_filename(value)
super().__init__(value, *args, **kwargs)
if self._filename is not None and disposition is not None:
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(
disposition, filename=self._filename
)
self.set_content_disposition(disposition, filename=self._filename)
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
while chunk:
await writer.write(chunk)
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
finally:
await loop.run_in_executor(None, self._value.close)
class TextIOPayload(IOBasePayload):
def __init__(self,
value: TextIO,
*args: Any,
encoding: Optional[str]=None,
content_type: Optional[str]=None,
**kwargs: Any) -> None:
def __init__(
self,
value: TextIO,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
encoding = 'utf-8'
content_type = 'text/plain; charset=utf-8'
encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
encoding = mimetype.parameters.get('charset', 'utf-8')
encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = 'text/plain; charset=%s' % encoding
content_type = "text/plain; charset=%s" % encoding
super().__init__(
value,
@ -341,20 +338,15 @@ class TextIOPayload(IOBasePayload):
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
while chunk:
await writer.write(chunk.encode(self._encoding))
chunk = await loop.run_in_executor(
None, self._value.read, DEFAULT_LIMIT
)
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
finally:
await loop.run_in_executor(None, self._value.close)
class BytesIOPayload(IOBasePayload):
@property
def size(self) -> int:
position = self._value.tell()
@ -364,7 +356,6 @@ class BytesIOPayload(IOBasePayload):
class BufferedReaderPayload(IOBasePayload):
@property
def size(self) -> Optional[int]:
try:
@ -376,22 +367,27 @@ class BufferedReaderPayload(IOBasePayload):
class JsonPayload(BytesPayload):
def __init__(self,
value: Any,
encoding: str='utf-8',
content_type: str='application/json',
dumps: JSONEncoder=json.dumps,
*args: Any,
**kwargs: Any) -> None:
def __init__(
self,
value: Any,
encoding: str = "utf-8",
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(
dumps(value).encode(encoding),
content_type=content_type, encoding=encoding, *args, **kwargs)
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
if TYPE_CHECKING: # pragma: no cover
from typing import AsyncIterator, AsyncIterable
from typing import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator[bytes]
_AsyncIterable = AsyncIterable[bytes]
@ -406,17 +402,16 @@ class AsyncIterablePayload(Payload):
_iter = None # type: Optional[_AsyncIterator]
def __init__(self,
value: _AsyncIterable,
*args: Any,
**kwargs: Any) -> None:
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError("value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value)))
raise TypeError(
"value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value))
)
if 'content_type' not in kwargs:
kwargs['content_type'] = 'application/octet-stream'
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
@ -435,7 +430,6 @@ class AsyncIterablePayload(Payload):
class StreamReaderPayload(AsyncIterablePayload):
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value.iter_any(), *args, **kwargs)
@ -446,11 +440,9 @@ PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(
BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables like
# multipart.BodyPartReaderPayload a chance to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable,
order=Order.try_last)
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
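A minimal sketch of the dispatch these registrations enable, assuming the vendored aiohttp (~3.7) is importable:

import io
from aiohttp.payload import get_payload

p = get_payload("hello")                  # str -> StringPayload
print(type(p).__name__, p.content_type)   # StringPayload text/plain; charset=utf-8

p = get_payload(io.BytesIO(b"raw"))       # io.BytesIO -> BytesIOPayload
print(type(p).__name__, p.size)           # BytesIOPayload 3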

View File

@ -21,36 +21,38 @@ Then you can use `file_sender` like this:
"""
import asyncio
import types
import warnings
from typing import Any, Awaitable, Callable, Dict, Tuple
from .abc import AbstractStreamWriter
from .payload import Payload, payload_type
__all__ = ('streamer',)
__all__ = ("streamer",)
class _stream_wrapper:
def __init__(self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any]) -> None:
self.coro = asyncio.coroutine(coro)
def __init__(
self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any],
) -> None:
self.coro = types.coroutine(coro)
self.args = args
self.kwargs = kwargs
async def __call__(self, writer: AbstractStreamWriter) -> None:
await self.coro(writer, *self.args, **self.kwargs)
await self.coro(writer, *self.args, **self.kwargs) # type: ignore
class streamer:
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
warnings.warn("@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2,
)
self.coro = coro
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
@ -59,14 +61,12 @@ class streamer:
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
super().__init__(value(), *args, **kwargs)
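Since @streamer is deprecated above, here is a minimal sketch of the suggested replacement -- a plain async generator, which AsyncIterablePayload picks up automatically (the file name is hypothetical):

async def file_sender(file_name):
    # Synchronous open/read is acceptable here only for small files.
    with open(file_name, "rb") as f:
        chunk = f.read(2 ** 16)
        while chunk:
            yield chunk
            chunk = f.read(2 ** 16)

# Usage sketch: await session.post(url, data=file_sender("report.bin"))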

View File

@ -1 +1 @@
Marker
Marker

View File

@ -16,8 +16,8 @@ from .test_utils import (
loop_context,
setup_test_loop,
teardown_test_loop,
unused_port as _unused_port,
)
from .test_utils import unused_port as _unused_port
try:
import uvloop
@ -32,14 +32,23 @@ except ImportError: # pragma: no cover
def pytest_addoption(parser): # type: ignore
parser.addoption(
'--aiohttp-fast', action='store_true', default=False,
help='run tests faster by disabling extra checks')
"--aiohttp-fast",
action="store_true",
default=False,
help="run tests faster by disabling extra checks",
)
parser.addoption(
'--aiohttp-loop', action='store', default='pyloop',
help='run tests with specific loop: pyloop, uvloop, tokio or all')
"--aiohttp-loop",
action="store",
default="pyloop",
help="run tests with specific loop: pyloop, uvloop, tokio or all",
)
parser.addoption(
'--aiohttp-enable-loop-debug', action='store_true', default=False,
help='enable event loop debug mode')
"--aiohttp-enable-loop-debug",
action="store_true",
default=False,
help="enable event loop debug mode",
)
def pytest_fixture_setup(fixturedef): # type: ignore
@ -59,25 +68,25 @@ def pytest_fixture_setup(fixturedef): # type: ignore
return
strip_request = False
if 'request' not in fixturedef.argnames:
fixturedef.argnames += ('request',)
if "request" not in fixturedef.argnames:
fixturedef.argnames += ("request",)
strip_request = True
def wrapper(*args, **kwargs): # type: ignore
request = kwargs['request']
request = kwargs["request"]
if strip_request:
del kwargs['request']
del kwargs["request"]
# if neither the fixture nor the test uses the 'loop' fixture,
# 'getfixturevalue' will fail because the test is not parameterized
# (this can be removed someday if 'loop' is no longer parameterized)
if 'loop' not in request.fixturenames:
if "loop" not in request.fixturenames:
raise Exception(
"Asynchronous fixtures must depend on the 'loop' fixture or "
"be used in tests depending from it."
)
_loop = request.getfixturevalue('loop')
_loop = request.getfixturevalue("loop")
if is_async_gen:
# for async generators, we need to advance the generator once,
@ -87,7 +96,7 @@ def pytest_fixture_setup(fixturedef): # type: ignore
def finalizer(): # type: ignore
try:
return _loop.run_until_complete(gen.__anext__())
except StopAsyncIteration: # NOQA
except StopAsyncIteration:
pass
request.addfinalizer(finalizer)
@ -101,13 +110,13 @@ def pytest_fixture_setup(fixturedef): # type: ignore
@pytest.fixture
def fast(request): # type: ignore
"""--fast config option"""
return request.config.getoption('--aiohttp-fast')
return request.config.getoption("--aiohttp-fast")
@pytest.fixture
def loop_debug(request): # type: ignore
"""--enable-loop-debug config option"""
return request.config.getoption('--aiohttp-enable-loop-debug')
return request.config.getoption("--aiohttp-enable-loop-debug")
@contextlib.contextmanager
@ -120,15 +129,17 @@ def _runtime_warning_context(): # type: ignore
"""
with warnings.catch_warnings(record=True) as _warnings:
yield
rw = ['{w.filename}:{w.lineno}:{w.message}'.format(w=w)
for w in _warnings # type: ignore
if w.category == RuntimeWarning]
rw = [
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
for w in _warnings
if w.category == RuntimeWarning
]
if rw:
raise RuntimeError('{} Runtime Warning{},\n{}'.format(
len(rw),
'' if len(rw) == 1 else 's',
'\n'.join(rw)
))
raise RuntimeError(
"{} Runtime Warning{},\n{}".format(
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
)
)
@contextlib.contextmanager
@ -161,48 +172,52 @@ def pytest_pyfunc_call(pyfuncitem): # type: ignore
"""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get('proactor_loop')\
or pyfuncitem.funcargs.get('loop', None)
existing_loop = pyfuncitem.funcargs.get(
"proactor_loop"
) or pyfuncitem.funcargs.get("loop", None)
with _runtime_warning_context():
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
testargs = {arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames}
testargs = {
arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames
}
_loop.run_until_complete(pyfuncitem.obj(**testargs))
return True
def pytest_generate_tests(metafunc): # type: ignore
if 'loop_factory' not in metafunc.fixturenames:
if "loop_factory" not in metafunc.fixturenames:
return
loops = metafunc.config.option.aiohttp_loop
avail_factories = {'pyloop': asyncio.DefaultEventLoopPolicy}
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
if uvloop is not None: # pragma: no cover
avail_factories['uvloop'] = uvloop.EventLoopPolicy
avail_factories["uvloop"] = uvloop.EventLoopPolicy
if tokio is not None: # pragma: no cover
avail_factories['tokio'] = tokio.EventLoopPolicy
avail_factories["tokio"] = tokio.EventLoopPolicy
if loops == 'all':
loops = 'pyloop,uvloop?,tokio?'
if loops == "all":
loops = "pyloop,uvloop?,tokio?"
factories = {} # type: ignore
for name in loops.split(','):
required = not name.endswith('?')
name = name.strip(' ?')
for name in loops.split(","):
required = not name.endswith("?")
name = name.strip(" ?")
if name not in avail_factories: # pragma: no cover
if required:
raise ValueError(
"Unknown loop '%s', available loops: %s" % (
name, list(factories.keys())))
"Unknown loop '%s', available loops: %s"
% (name, list(factories.keys()))
)
else:
continue
factories[name] = avail_factories[name]
metafunc.parametrize("loop_factory",
list(factories.values()),
ids=list(factories.keys()))
metafunc.parametrize(
"loop_factory", list(factories.values()), ids=list(factories.keys())
)
@pytest.fixture
@ -233,8 +248,11 @@ def proactor_loop(): # type: ignore
@pytest.fixture
def unused_port(aiohttp_unused_port): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning)
warnings.warn(
"Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_unused_port
@ -269,8 +287,11 @@ def aiohttp_server(loop): # type: ignore
@pytest.fixture
def test_server(aiohttp_server): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_server fixture instead",
DeprecationWarning)
warnings.warn(
"Deprecated, use aiohttp_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_server
@ -299,8 +320,11 @@ def aiohttp_raw_server(loop): # type: ignore
@pytest.fixture
def raw_test_server(aiohttp_raw_server): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_raw_server fixture instead",
DeprecationWarning)
warnings.warn(
"Deprecated, use aiohttp_raw_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_raw_server
@ -316,8 +340,9 @@ def aiohttp_client(loop): # type: ignore
async def go(__param, *args, server_kwargs=None, **kwargs): # type: ignore
if (isinstance(__param, Callable) and # type: ignore
not isinstance(__param, (Application, BaseTestServer))):
if isinstance(__param, Callable) and not isinstance( # type: ignore
__param, (Application, BaseTestServer)
):
__param = __param(loop, *args, **kwargs)
kwargs = {}
else:
@ -347,6 +372,9 @@ def aiohttp_client(loop): # type: ignore
@pytest.fixture
def test_client(aiohttp_client): # type: ignore # pragma: no cover
warnings.warn("Deprecated, use aiohttp_client fixture instead",
DeprecationWarning)
warnings.warn(
"Deprecated, use aiohttp_client fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_client
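A minimal test using the aiohttp_client fixture defined above (handler and assertions are illustrative):

from aiohttp import web

async def hello(request):
    return web.Response(text="hi")

async def test_hello(aiohttp_client):
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)  # accepts an Application, server, or factory
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "hi"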

View File

@ -5,10 +5,11 @@ from typing import Any, Dict, List, Optional
from .abc import AbstractResolver
from .helpers import get_running_loop
__all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver')
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
try:
import aiodns
# aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError: # pragma: no cover
aiodns = None
@ -21,21 +22,42 @@ class ThreadedResolver(AbstractResolver):
concurrent.futures.ThreadPoolExecutor.
"""
def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
async def resolve(self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
async def resolve(
self, hostname: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
host, port, type=socket.SOCK_STREAM, family=family)
hostname,
port,
type=socket.SOCK_STREAM,
family=family,
flags=socket.AI_ADDRCONFIG,
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6 and address[3]: # type: ignore
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
# getnameinfo() unconditionally, but skipping it in the common case helps performance.
host, _port = socket.getnameinfo(
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
)
port = int(_port)
else:
host, port = address[:2]
hosts.append(
{'hostname': host,
'host': address[0], 'port': address[1],
'family': family, 'proto': proto,
'flags': socket.AI_NUMERICHOST})
{
"hostname": hostname,
"host": host,
"port": port,
"family": family,
"proto": proto,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
return hosts
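The getnameinfo() branch above exists because address[0] alone would drop the IPv6 zone index. A small, platform-dependent illustration (scope id 1 is assumed to be a valid interface index, such as loopback on Linux):

import socket

addr = ("fe80::1", 8080, 0, 1)  # IPv6 4-tuple as produced by getaddrinfo()
host, port = socket.getnameinfo(addr, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV)
print(host, port)  # e.g. "fe80::1%lo 8080" -- the zone index survives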
@ -46,20 +68,25 @@ class ThreadedResolver(AbstractResolver):
class AsyncResolver(AbstractResolver):
"""Use the `aiodns` package to make asynchronous DNS lookups"""
def __init__(self, loop: Optional[asyncio.AbstractEventLoop]=None,
*args: Any, **kwargs: Any) -> None:
def __init__(
self,
loop: Optional[asyncio.AbstractEventLoop] = None,
*args: Any,
**kwargs: Any
) -> None:
if aiodns is None:
raise RuntimeError("Resolver requires aiodns library")
self._loop = get_running_loop(loop)
self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
if not hasattr(self._resolver, 'gethostbyname'):
if not hasattr(self._resolver, "gethostbyname"):
# aiodns 1.1 is not available, fallback to DNSResolver.query
self.resolve = self._resolve_with_query # type: ignore
async def resolve(self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
async def resolve(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
try:
resp = await self._resolver.gethostbyname(host, family)
except aiodns.error.DNSError as exc:
@ -68,10 +95,15 @@ class AsyncResolver(AbstractResolver):
hosts = []
for address in resp.addresses:
hosts.append(
{'hostname': host,
'host': address, 'port': port,
'family': family, 'proto': 0,
'flags': socket.AI_NUMERICHOST})
{
"hostname": host,
"host": address,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
if not hosts:
raise OSError("DNS lookup failed")
@ -79,12 +111,12 @@ class AsyncResolver(AbstractResolver):
return hosts
async def _resolve_with_query(
self, host: str, port: int=0,
family: int=socket.AF_INET) -> List[Dict[str, Any]]:
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
if family == socket.AF_INET6:
qtype = 'AAAA'
qtype = "AAAA"
else:
qtype = 'A'
qtype = "A"
try:
resp = await self._resolver.query(host, qtype)
@ -95,10 +127,15 @@ class AsyncResolver(AbstractResolver):
hosts = []
for rr in resp:
hosts.append(
{'hostname': host,
'host': rr.host, 'port': port,
'family': family, 'proto': 0,
'flags': socket.AI_NUMERICHOST})
{
"hostname": host,
"host": rr.host,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
)
if not hosts:
raise OSError("DNS lookup failed")
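A minimal usage sketch for the resolver API shown above (requires network access; the hostname is illustrative):

import asyncio
from aiohttp.resolver import ThreadedResolver

async def main():
    # Each entry carries the keys the connector expects:
    # hostname, host, port, family, proto, flags.
    for entry in await ThreadedResolver().resolve("example.com", 443):
        print(entry["host"], entry["port"])

asyncio.run(main())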

View File

@ -1,6 +1,6 @@
from aiohttp.frozenlist import FrozenList
__all__ = ('Signal',)
__all__ = ("Signal",)
class Signal(FrozenList):
@ -12,16 +12,16 @@ class Signal(FrozenList):
arguments.
"""
__slots__ = ('_owner',)
__slots__ = ("_owner",)
def __init__(self, owner):
super().__init__()
self._owner = owner
def __repr__(self):
return '<Signal owner={}, frozen={}, {!r}>'.format(self._owner,
self.frozen,
list(self))
return "<Signal owner={}, frozen={}, {!r}>".format(
self._owner, self.frozen, list(self)
)
async def send(self, *args, **kwargs):
"""

View File

@ -2,16 +2,11 @@ from typing import Any, Generic, TypeVar
from aiohttp.frozenlist import FrozenList
__all__ = ('Signal',)
_T = TypeVar('_T')
__all__ = ("Signal",)
_T = TypeVar("_T")
class Signal(FrozenList[_T], Generic[_T]):
def __init__(self, owner: Any) -> None: ...
def __repr__(self) -> str: ...
async def send(self, *args: Any, **kwargs: Any) -> None: ...

View File

@ -1,25 +1,26 @@
import asyncio
import collections
import warnings
from typing import List # noqa
from typing import Awaitable, Callable, Generic, Optional, Tuple, TypeVar
from typing import Awaitable, Callable, Generic, List, Optional, Tuple, TypeVar
from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, set_exception, set_result
from .log import internal_logger
try: # pragma: no cover
from typing import Deque # noqa
from typing import Deque
except ImportError:
from typing_extensions import Deque # noqa
from typing_extensions import Deque
__all__ = (
'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue',
'FlowControlDataQueue')
"EMPTY_PAYLOAD",
"EofStream",
"StreamReader",
"DataQueue",
"FlowControlDataQueue",
)
DEFAULT_LIMIT = 2 ** 16
_T = TypeVar('_T')
_T = TypeVar("_T")
class EofStream(Exception):
@ -27,40 +28,37 @@ class EofStream(Exception):
class AsyncStreamIterator(Generic[_T]):
def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
self.read_func = read_func
def __aiter__(self) -> 'AsyncStreamIterator[_T]':
def __aiter__(self) -> "AsyncStreamIterator[_T]":
return self
async def __anext__(self) -> _T:
try:
rv = await self.read_func()
except EofStream:
raise StopAsyncIteration # NOQA
if rv == b'':
raise StopAsyncIteration # NOQA
raise StopAsyncIteration
if rv == b"":
raise StopAsyncIteration
return rv
class ChunkTupleAsyncStreamIterator:
def __init__(self, stream: 'StreamReader') -> None:
def __init__(self, stream: "StreamReader") -> None:
self._stream = stream
def __aiter__(self) -> 'ChunkTupleAsyncStreamIterator':
def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
return self
async def __anext__(self) -> Tuple[bytes, bool]:
rv = await self._stream.readchunk()
if rv == (b'', False):
raise StopAsyncIteration # NOQA
if rv == (b"", False):
raise StopAsyncIteration
return rv
class AsyncStreamReaderMixin:
def __aiter__(self) -> AsyncStreamIterator[bytes]:
return AsyncStreamIterator(self.readline) # type: ignore
@ -105,10 +103,14 @@ class StreamReader(AsyncStreamReaderMixin):
total_bytes = 0
def __init__(self, protocol: BaseProtocol,
*, limit: int=DEFAULT_LIMIT,
timer: Optional[BaseTimerContext]=None,
loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
def __init__(
self,
protocol: BaseProtocol,
limit: int,
*,
timer: Optional[BaseTimerContext] = None,
loop: Optional[asyncio.AbstractEventLoop] = None
) -> None:
self._protocol = protocol
self._low_water = limit
self._high_water = limit * 2
@ -130,16 +132,19 @@ class StreamReader(AsyncStreamReaderMixin):
def __repr__(self) -> str:
info = [self.__class__.__name__]
if self._size:
info.append('%d bytes' % self._size)
info.append("%d bytes" % self._size)
if self._eof:
info.append('eof')
if self._low_water != DEFAULT_LIMIT:
info.append('low=%d high=%d' % (self._low_water, self._high_water))
info.append("eof")
if self._low_water != 2 ** 16: # default limit
info.append("low=%d high=%d" % (self._low_water, self._high_water))
if self._waiter:
info.append('w=%r' % self._waiter)
info.append("w=%r" % self._waiter)
if self._exception:
info.append('e=%r' % self._exception)
return '<%s>' % ' '.join(info)
info.append("e=%r" % self._exception)
return "<%s>" % " ".join(info)
def get_read_buffer_limits(self) -> Tuple[int, int]:
return (self._low_water, self._high_water)
def exception(self) -> Optional[BaseException]:
return self._exception
@ -163,7 +168,7 @@ class StreamReader(AsyncStreamReaderMixin):
try:
callback()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
else:
self._eof_callbacks.append(callback)
@ -184,7 +189,7 @@ class StreamReader(AsyncStreamReaderMixin):
try:
cb()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
self._eof_callbacks.clear()
@ -208,17 +213,18 @@ class StreamReader(AsyncStreamReaderMixin):
self._eof_waiter = None
def unread_data(self, data: bytes) -> None:
""" rollback reading some data from stream, inserting it to buffer head.
"""
warnings.warn("unread_data() is deprecated "
"and will be removed in future releases (#3260)",
DeprecationWarning,
stacklevel=2)
"""rollback reading some data from stream, inserting it to buffer head."""
warnings.warn(
"unread_data() is deprecated "
"and will be removed in future releases (#3260)",
DeprecationWarning,
stacklevel=2,
)
if not data:
return
if self._buffer_offset:
self._buffer[0] = self._buffer[0][self._buffer_offset:]
self._buffer[0] = self._buffer[0][self._buffer_offset :]
self._buffer_offset = 0
self._size += len(data)
self._cursor -= len(data)
@ -226,8 +232,8 @@ class StreamReader(AsyncStreamReaderMixin):
self._eof_counter = 0
# TODO: size is ignored, remove the param later
def feed_data(self, data: bytes, size: int=0) -> None:
assert not self._eof, 'feed_data after feed_eof'
def feed_data(self, data: bytes, size: int = 0) -> None:
assert not self._eof, "feed_data after feed_eof"
if not data:
return
@ -241,21 +247,23 @@ class StreamReader(AsyncStreamReaderMixin):
self._waiter = None
set_result(waiter, None)
if (self._size > self._high_water and
not self._protocol._reading_paused):
if self._size > self._high_water and not self._protocol._reading_paused:
self._protocol.pause_reading()
def begin_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
if self.total_bytes:
raise RuntimeError("Called begin_http_chunk_receiving when"
"some data was already fed")
raise RuntimeError(
"Called begin_http_chunk_receiving when" "some data was already fed"
)
self._http_chunk_splits = []
def end_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
raise RuntimeError("Called end_chunk_receiving without calling "
"begin_chunk_receiving first")
raise RuntimeError(
"Called end_chunk_receiving without calling "
"begin_chunk_receiving first"
)
# self._http_chunk_splits contains logical byte offsets from start of
# the body transfer. Each offset is the offset of the end of a chunk.
@ -286,8 +294,10 @@ class StreamReader(AsyncStreamReaderMixin):
# would have an unexpected behaviour. It would not be possible to know
# which coroutine would get the next data.
if self._waiter is not None:
raise RuntimeError('%s() called while another coroutine is '
'already waiting for incoming data' % func_name)
raise RuntimeError(
"%s() called while another coroutine is "
"already waiting for incoming data" % func_name
)
waiter = self._waiter = self._loop.create_future()
try:
@ -310,7 +320,7 @@ class StreamReader(AsyncStreamReaderMixin):
while not_enough:
while self._buffer and not_enough:
offset = self._buffer_offset
ichar = self._buffer[0].find(b'\n', offset) + 1
ichar = self._buffer[0].find(b"\n", offset) + 1
# Read from current offset to found b'\n' or to the end.
data = self._read_nowait_chunk(ichar - offset if ichar else -1)
line.append(data)
@ -319,17 +329,17 @@ class StreamReader(AsyncStreamReaderMixin):
not_enough = False
if line_size > self._high_water:
raise ValueError('Line is too long')
raise ValueError("Line is too long")
if self._eof:
break
if not_enough:
await self._wait('readline')
await self._wait("readline")
return b''.join(line)
return b"".join(line)
async def read(self, n: int=-1) -> bytes:
async def read(self, n: int = -1) -> bytes:
if self._exception is not None:
raise self._exception
@ -339,14 +349,16 @@ class StreamReader(AsyncStreamReaderMixin):
# lets keep this code one major release.
if __debug__:
if self._eof and not self._buffer:
self._eof_counter = getattr(self, '_eof_counter', 0) + 1
self._eof_counter = getattr(self, "_eof_counter", 0) + 1
if self._eof_counter > 5:
internal_logger.warning(
'Multiple access to StreamReader in eof state, '
'might be infinite loop.', stack_info=True)
"Multiple access to StreamReader in eof state, "
"might be infinite loop.",
stack_info=True,
)
if not n:
return b''
return b""
if n < 0:
# This used to just loop creating a new waiter hoping to
@ -359,13 +371,13 @@ class StreamReader(AsyncStreamReaderMixin):
if not block:
break
blocks.append(block)
return b''.join(blocks)
return b"".join(blocks)
# TODO: should be `if` instead of `while`
# because the waiter may be triggered on chunk end,
# without feeding any data
while not self._buffer and not self._eof:
await self._wait('read')
await self._wait("read")
return self._read_nowait(n)
@ -377,7 +389,7 @@ class StreamReader(AsyncStreamReaderMixin):
# because the waiter may be triggered on chunk end,
# without feeding any data
while not self._buffer and not self._eof:
await self._wait('readany')
await self._wait("readany")
return self._read_nowait(-1)
@ -396,9 +408,11 @@ class StreamReader(AsyncStreamReaderMixin):
if pos == self._cursor:
return (b"", True)
if pos > self._cursor:
return (self._read_nowait(pos-self._cursor), True)
internal_logger.warning('Skipping HTTP chunk end due to data '
'consumption beyond chunk boundary')
return (self._read_nowait(pos - self._cursor), True)
internal_logger.warning(
"Skipping HTTP chunk end due to data "
"consumption beyond chunk boundary"
)
if self._buffer:
return (self._read_nowait_chunk(-1), False)
@ -407,9 +421,9 @@ class StreamReader(AsyncStreamReaderMixin):
if self._eof:
# Special case for signifying EOF.
# (b'', True) is not a final return value actually.
return (b'', False)
return (b"", False)
await self._wait('readchunk')
await self._wait("readchunk")
async def readexactly(self, n: int) -> bytes:
if self._exception is not None:
@ -419,15 +433,14 @@ class StreamReader(AsyncStreamReaderMixin):
while n > 0:
block = await self.read(n)
if not block:
partial = b''.join(blocks)
raise asyncio.IncompleteReadError(
partial, len(partial) + n)
partial = b"".join(blocks)
raise asyncio.IncompleteReadError(partial, len(partial) + n)
blocks.append(block)
n -= len(block)
return b''.join(blocks)
return b"".join(blocks)
def read_nowait(self, n: int=-1) -> bytes:
def read_nowait(self, n: int = -1) -> bytes:
# default was changed to be consistent with .read(-1)
#
# I believe most users don't know about the method and
@ -437,7 +450,8 @@ class StreamReader(AsyncStreamReaderMixin):
if self._waiter and not self._waiter.done():
raise RuntimeError(
'Called while some coroutine is waiting for incoming data.')
"Called while some coroutine is waiting for incoming data."
)
return self._read_nowait(n)
@ -445,7 +459,7 @@ class StreamReader(AsyncStreamReaderMixin):
first_buffer = self._buffer[0]
offset = self._buffer_offset
if n != -1 and len(first_buffer) - offset > n:
data = first_buffer[offset:offset + n]
data = first_buffer[offset : offset + n]
self._buffer_offset += n
elif offset:
@ -469,7 +483,7 @@ class StreamReader(AsyncStreamReaderMixin):
return data
def _read_nowait(self, n: int) -> bytes:
""" Read not more than n bytes, or whole buffer is n == -1 """
""" Read not more than n bytes, or whole buffer if n == -1 """
chunks = []
while self._buffer:
@ -480,11 +494,10 @@ class StreamReader(AsyncStreamReaderMixin):
if n == 0:
break
return b''.join(chunks) if chunks else b''
return b"".join(chunks) if chunks else b""
class EmptyStreamReader(AsyncStreamReaderMixin):
def exception(self) -> Optional[BaseException]:
return None
@ -495,7 +508,7 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
try:
callback()
except Exception:
internal_logger.exception('Exception in eof callback')
internal_logger.exception("Exception in eof callback")
def feed_eof(self) -> None:
pass
@ -509,26 +522,26 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
async def wait_eof(self) -> None:
return
def feed_data(self, data: bytes, n: int=0) -> None:
def feed_data(self, data: bytes, n: int = 0) -> None:
pass
async def readline(self) -> bytes:
return b''
return b""
async def read(self, n: int=-1) -> bytes:
return b''
async def read(self, n: int = -1) -> bytes:
return b""
async def readany(self) -> bytes:
return b''
return b""
async def readchunk(self) -> Tuple[bytes, bool]:
return (b'', True)
return (b"", True)
async def readexactly(self, n: int) -> bytes:
raise asyncio.IncompleteReadError(b'', n)
raise asyncio.IncompleteReadError(b"", n)
def read_nowait(self) -> bytes:
return b''
return b""
EMPTY_PAYLOAD = EmptyStreamReader()
@ -566,7 +579,7 @@ class DataQueue(Generic[_T]):
self._waiter = None
set_exception(waiter, exc)
def feed_data(self, data: _T, size: int=0) -> None:
def feed_data(self, data: _T, size: int = 0) -> None:
self._size += size
self._buffer.append((data, size))
@ -612,15 +625,15 @@ class FlowControlDataQueue(DataQueue[_T]):
It is a destination for parsed data."""
def __init__(self, protocol: BaseProtocol, *,
limit: int=DEFAULT_LIMIT,
loop: asyncio.AbstractEventLoop) -> None:
def __init__(
self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
) -> None:
super().__init__(loop=loop)
self._protocol = protocol
self._limit = limit * 2
def feed_data(self, data: _T, size: int=0) -> None:
def feed_data(self, data: _T, size: int = 0) -> None:
super().feed_data(data, size)
if self._size > self._limit and not self._protocol._reading_paused:
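A minimal sketch of the reworked StreamReader constructor, where limit is now a required positional argument (BaseProtocol(loop) matches this vendored version; the fed bytes are made up):

import asyncio
from aiohttp.base_protocol import BaseProtocol
from aiohttp.streams import StreamReader

async def main():
    loop = asyncio.get_running_loop()
    reader = StreamReader(BaseProtocol(loop), 2 ** 16, loop=loop)
    reader.feed_data(b"first line\nrest")
    reader.feed_eof()
    print(await reader.readline())  # b'first line\n'
    print(await reader.read())      # b'rest'

asyncio.run(main())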

View File

@ -5,30 +5,25 @@ import socket
from contextlib import suppress
from typing import Optional # noqa
__all__ = ('tcp_keepalive', 'tcp_nodelay', 'tcp_cork')
__all__ = ("tcp_keepalive", "tcp_nodelay")
if hasattr(socket, 'TCP_CORK'): # pragma: no cover
CORK = socket.TCP_CORK # type: Optional[int]
elif hasattr(socket, 'TCP_NOPUSH'): # pragma: no cover
CORK = socket.TCP_NOPUSH # type: ignore
else: # pragma: no cover
CORK = None
if hasattr(socket, "SO_KEEPALIVE"):
if hasattr(socket, 'SO_KEEPALIVE'):
def tcp_keepalive(transport: asyncio.Transport) -> None:
sock = transport.get_extra_info('socket')
sock = transport.get_extra_info("socket")
if sock is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
else:
def tcp_keepalive(
transport: asyncio.Transport) -> None: # pragma: no cover
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
pass
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
sock = transport.get_extra_info('socket')
sock = transport.get_extra_info("socket")
if sock is None:
return
@ -40,24 +35,4 @@ def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
# socket may be closed already, on windows OSError get raised
with suppress(OSError):
sock.setsockopt(
socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
def tcp_cork(transport: asyncio.Transport, value: bool) -> None:
sock = transport.get_extra_info('socket')
if CORK is None:
return
if sock is None:
return
if sock.family not in (socket.AF_INET, socket.AF_INET6):
return
value = bool(value)
with suppress(OSError):
sock.setsockopt(
socket.IPPROTO_TCP, CORK, value)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
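With tcp_cork() removed above, tcp_nodelay() is the remaining knob; a minimal sketch of what it does to a connected transport:

import asyncio
import socket

def disable_nagle(transport: asyncio.Transport) -> None:
    # Mirrors tcp_nodelay(transport, True): skip non-TCP and closed sockets.
    sock = transport.get_extra_info("socket")
    if sock is not None and sock.family in (socket.AF_INET, socket.AF_INET6):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)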

View File

@ -5,21 +5,13 @@ import contextlib
import functools
import gc
import inspect
import os
import socket
import sys
import unittest
from abc import ABC, abstractmethod
from types import TracebackType
from typing import ( # noqa
TYPE_CHECKING,
Any,
Callable,
Iterator,
List,
Optional,
Type,
Union,
)
from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Type, Union
from unittest import mock
from multidict import CIMultiDict, CIMultiDictProxy
@ -34,8 +26,8 @@ from aiohttp.client import (
from . import ClientSession, hdrs
from .abc import AbstractCookieJar
from .client_reqrep import ClientResponse # noqa
from .client_ws import ClientWebSocketResponse # noqa
from .client_reqrep import ClientResponse
from .client_ws import ClientWebSocketResponse
from .helpers import sentinel
from .http import HttpVersion, RawRequestMessage
from .signals import Signal
@ -57,12 +49,20 @@ else:
SSLContext = None
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
def get_unused_port_socket(host: str) -> socket.socket:
return get_port_socket(host, 0)
def get_port_socket(host: str, port: int) -> socket.socket:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if REUSE_ADDRESS:
# Windows has different semantics for SO_REUSEADDR,
# so don't set it. Ref:
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((host, port))
return s
@ -70,21 +70,23 @@ def get_port_socket(host: str, port: int) -> socket.socket:
def unused_port() -> int:
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(('127.0.0.1', 0))
s.bind(("127.0.0.1", 0))
return s.getsockname()[1]
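A minimal sketch of the two port helpers above (SO_REUSEADDR is only set on POSIX, per the Windows caveat in get_port_socket):

from aiohttp.test_utils import get_unused_port_socket, unused_port

print(unused_port())             # OS-assigned free port number

sock = get_unused_port_socket("127.0.0.1")
print(sock.getsockname())        # ('127.0.0.1', <port>)
sock.close()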
class BaseTestServer(ABC):
__test__ = False
def __init__(self,
*,
scheme: Union[str, object]=sentinel,
loop: Optional[asyncio.AbstractEventLoop]=None,
host: str='127.0.0.1',
port: Optional[int]=None,
skip_url_asserts: bool=False,
**kwargs: Any) -> None:
def __init__(
self,
*,
scheme: Union[str, object] = sentinel,
loop: Optional[asyncio.AbstractEventLoop] = None,
host: str = "127.0.0.1",
port: Optional[int] = None,
skip_url_asserts: bool = False,
**kwargs: Any,
) -> None:
self._loop = loop
self.runner = None # type: Optional[BaseRunner]
self._root = None # type: Optional[URL]
@ -94,13 +96,13 @@ class BaseTestServer(ABC):
self.scheme = scheme
self.skip_url_asserts = skip_url_asserts
async def start_server(self,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
async def start_server(
self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
) -> None:
if self.runner:
return
self._loop = loop
self._ssl = kwargs.pop('ssl', None)
self._ssl = kwargs.pop("ssl", None)
self.runner = await self._make_runner(**kwargs)
await self.runner.setup()
if not self.port:
@ -116,13 +118,11 @@ class BaseTestServer(ABC):
self.port = sockets[0].getsockname()[1]
if self.scheme is sentinel:
if self._ssl:
scheme = 'https'
scheme = "https"
else:
scheme = 'http'
scheme = "http"
self.scheme = scheme
self._root = URL('{}://{}:{}'.format(self.scheme,
self.host,
self.port))
self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
@abstractmethod # pragma: no cover
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
@ -176,31 +176,38 @@ class BaseTestServer(ABC):
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> 'BaseTestServer':
async def __aenter__(self) -> "BaseTestServer":
await self.start_server(loop=self._loop)
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType]) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
await self.close()
class TestServer(BaseTestServer):
def __init__(self, app: Application, *,
scheme: Union[str, object]=sentinel,
host: str='127.0.0.1',
port: Optional[int]=None,
**kwargs: Any):
def __init__(
self,
app: Application,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
):
self.app = app
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
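A usage sketch for TestServer; the application, handler, and route are illustrative, not part of this diff:

import asyncio

from aiohttp import web
from aiohttp.test_utils import TestServer

async def hello(request):
    return web.Response(text="ok")

async def demo():
    app = web.Application()
    app.router.add_get("/", hello)
    async with TestServer(app) as server:
        # make_url builds absolute URLs against the bound host/port
        print(server.make_url("/"))

asyncio.get_event_loop().run_until_complete(demo())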
@ -209,20 +216,20 @@ class TestServer(BaseTestServer):
class RawTestServer(BaseTestServer):
def __init__(self, handler: _RequestHandler, *,
scheme: Union[str, object]=sentinel,
host: str='127.0.0.1',
port: Optional[int]=None,
**kwargs: Any) -> None:
def __init__(
self,
handler: _RequestHandler,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
) -> None:
self._handler = handler
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
async def _make_runner(self,
debug: bool=True,
**kwargs: Any) -> ServerRunner:
srv = Server(
self._handler, loop=self._loop, debug=debug, **kwargs)
async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
return ServerRunner(srv, debug=debug, **kwargs)
@ -233,22 +240,26 @@ class TestClient:
To write functional tests for aiohttp-based servers.
"""
__test__ = False
def __init__(self, server: BaseTestServer, *,
cookie_jar: Optional[AbstractCookieJar]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
def __init__(
self,
server: BaseTestServer,
*,
cookie_jar: Optional[AbstractCookieJar] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any,
) -> None:
if not isinstance(server, BaseTestServer):
raise TypeError("server must be TestServer "
"instance, found type: %r" % type(server))
raise TypeError(
"server must be TestServer " "instance, found type: %r" % type(server)
)
self._server = server
self._loop = loop
if cookie_jar is None:
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
self._session = ClientSession(loop=loop,
cookie_jar=cookie_jar,
**kwargs)
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
self._closed = False
self._responses = [] # type: List[ClientResponse]
self._websockets = [] # type: List[ClientWebSocketResponse]
@ -286,17 +297,13 @@ class TestClient:
def make_url(self, path: str) -> URL:
return self._server.make_url(path)
async def _request(self, method: str, path: str,
**kwargs: Any) -> ClientResponse:
resp = await self._session.request(
method, self.make_url(path), **kwargs
)
async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
resp = await self._session.request(method, self.make_url(path), **kwargs)
# save it to close later
self._responses.append(resp)
return resp
def request(self, method: str, path: str,
**kwargs: Any) -> _RequestContextManager:
def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
"""Routes a request to tested http server.
The interface is identical to aiohttp.ClientSession.request,
@ -304,51 +311,35 @@ class TestClient:
test server.
"""
return _RequestContextManager(
self._request(method, path, **kwargs)
)
return _RequestContextManager(self._request(method, path, **kwargs))
def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP GET request."""
return _RequestContextManager(
self._request(hdrs.METH_GET, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP POST request."""
return _RequestContextManager(
self._request(hdrs.METH_POST, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP OPTIONS request."""
return _RequestContextManager(
self._request(hdrs.METH_OPTIONS, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP HEAD request."""
return _RequestContextManager(
self._request(hdrs.METH_HEAD, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PUT request."""
return _RequestContextManager(
self._request(hdrs.METH_PUT, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_PATCH, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_DELETE, path, **kwargs)
)
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
"""Initiate websocket connection.
@ -356,14 +347,10 @@ class TestClient:
The API corresponds to aiohttp.ClientSession.ws_connect.
"""
return _WSRequestContextManager(
self._ws_connect(path, **kwargs)
)
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
async def _ws_connect(self, path: str,
**kwargs: Any) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(
self.make_url(path), **kwargs)
async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
self._websockets.append(ws)
return ws
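A sketch of the full client/server round trip these helpers enable (handler and route are illustrative):

from aiohttp import web
from aiohttp.test_utils import TestClient, TestServer, loop_context

async def ping(request):
    return web.json_response({"ok": True})

with loop_context() as loop:
    app = web.Application()
    app.router.add_get("/ping", ping)

    async def run():
        async with TestClient(TestServer(app), loop=loop) as client:
            resp = await client.get("/ping")
            assert resp.status == 200
            assert await resp.json() == {"ok": True}

    loop.run_until_complete(run())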
@ -391,21 +378,25 @@ class TestClient:
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType]) -> None:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> 'TestClient':
async def __aenter__(self) -> "TestClient":
await self.start_server()
return self
async def __aexit__(self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType]) -> None:
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
await self.close()
@ -447,8 +438,7 @@ class AioHTTPTestCase(unittest.TestCase):
self.app = self.loop.run_until_complete(self.get_application())
self.server = self.loop.run_until_complete(self.get_server(self.app))
self.client = self.loop.run_until_complete(
self.get_client(self.server))
self.client = self.loop.run_until_complete(self.get_client(self.server))
self.loop.run_until_complete(self.client.start_server())
@ -484,8 +474,7 @@ def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
@functools.wraps(func, *args, **kwargs)
def new_func(self: Any, *inner_args: Any, **inner_kwargs: Any) -> Any:
return self.loop.run_until_complete(
func(self, *inner_args, **inner_kwargs))
return self.loop.run_until_complete(func(self, *inner_args, **inner_kwargs))
return new_func
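This decorator is what makes the canonical AioHTTPTestCase pattern work; a sketch with an illustrative route:

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop

class PingCase(AioHTTPTestCase):
    async def get_application(self):
        async def ping(request):
            return web.Response(text="pong")

        app = web.Application()
        app.router.add_get("/ping", ping)
        return app

    @unittest_run_loop
    async def test_ping(self):
        resp = await self.client.get("/ping")
        assert resp.status == 200
        assert await resp.text() == "pong"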
@ -494,8 +483,9 @@ _LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
@contextlib.contextmanager
def loop_context(loop_factory: _LOOP_FACTORY=asyncio.new_event_loop,
fast: bool=False) -> Iterator[asyncio.AbstractEventLoop]:
def loop_context(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
"""A contextmanager that creates an event_loop, for test purposes.
Handles the creation and cleanup of a test loop.
@ -506,7 +496,7 @@ def loop_context(loop_factory: _LOOP_FACTORY=asyncio.new_event_loop,
def setup_test_loop(
loop_factory: _LOOP_FACTORY=asyncio.new_event_loop
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
"""Create and return an asyncio.BaseEventLoop
instance.
@ -517,22 +507,21 @@ def setup_test_loop(
loop = loop_factory()
try:
module = loop.__class__.__module__
skip_watcher = 'uvloop' in module
skip_watcher = "uvloop" in module
except AttributeError: # pragma: no cover
# Just in case
skip_watcher = True
asyncio.set_event_loop(loop)
if sys.platform != "win32" and not skip_watcher:
policy = asyncio.get_event_loop_policy()
watcher = asyncio.SafeChildWatcher() # type: ignore
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(loop)
with contextlib.suppress(NotImplementedError):
policy.set_child_watcher(watcher)
return loop
def teardown_test_loop(loop: asyncio.AbstractEventLoop,
fast: bool=False) -> None:
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
"""Teardown and cleanup an event_loop created
by setup_test_loop.
@ -567,11 +556,11 @@ def _create_app_mock() -> mock.MagicMock:
return app
def _create_transport(sslcontext: Optional[SSLContext]=None) -> mock.Mock:
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
transport = mock.Mock()
def get_extra_info(key: str) -> Optional[SSLContext]:
if key == 'sslcontext':
if key == "sslcontext":
return sslcontext
else:
return None
@ -580,19 +569,23 @@ def _create_transport(sslcontext: Optional[SSLContext]=None) -> mock.Mock:
return transport
def make_mocked_request(method: str, path: str,
headers: Any=None, *,
match_info: Any=sentinel,
version: HttpVersion=HttpVersion(1, 1),
closing: bool=False,
app: Any=None,
writer: Any=sentinel,
protocol: Any=sentinel,
transport: Any=sentinel,
payload: Any=sentinel,
sslcontext: Optional[SSLContext]=None,
client_max_size: int=1024**2,
loop: Any=...) -> Any:
def make_mocked_request(
method: str,
path: str,
headers: Any = None,
*,
match_info: Any = sentinel,
version: HttpVersion = HttpVersion(1, 1),
closing: bool = False,
app: Any = None,
writer: Any = sentinel,
protocol: Any = sentinel,
transport: Any = sentinel,
payload: Any = sentinel,
sslcontext: Optional[SSLContext] = None,
client_max_size: int = 1024 ** 2,
loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
Useful in unit tests, when spinning full web server is overkill or
@ -611,16 +604,26 @@ def make_mocked_request(method: str, path: str,
if headers:
headers = CIMultiDictProxy(CIMultiDict(headers))
raw_hdrs = tuple(
(k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items())
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
else:
headers = CIMultiDictProxy(CIMultiDict())
raw_hdrs = ()
chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower()
chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
message = RawRequestMessage(
method, path, version, headers,
raw_hdrs, closing, False, False, chunked, URL(path))
method,
path,
version,
headers,
raw_hdrs,
closing,
False,
False,
chunked,
URL(path),
)
if app is None:
app = _create_app_mock()
@ -645,21 +648,24 @@ def make_mocked_request(method: str, path: str,
if payload is sentinel:
payload = mock.Mock()
req = Request(message, payload,
protocol, writer, task, loop,
client_max_size=client_max_size)
req = Request(
message, payload, protocol, writer, task, loop, client_max_size=client_max_size
)
match_info = UrlMappingMatchInfo(
{} if match_info is sentinel else match_info, mock.Mock())
{} if match_info is sentinel else match_info, mock.Mock()
)
match_info.add_app(app)
req._match_info = match_info
return req
def make_mocked_coro(return_value: Any=sentinel,
raise_exception: Any=sentinel) -> Any:
def make_mocked_coro(
return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
"""Creates a coroutine mock."""
async def mock_coro(*args: Any, **kwargs: Any) -> Any:
if raise_exception is not sentinel:
raise raise_exception
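A sketch of both mock helpers in use; the handler is illustrative:

import asyncio

from aiohttp import web
from aiohttp.test_utils import make_mocked_coro, make_mocked_request

async def handler(request):
    return web.Response(text=request.headers.get("token", ""))

loop = asyncio.get_event_loop()
req = make_mocked_request("GET", "/", headers={"token": "x"})
resp = loop.run_until_complete(handler(req))
assert resp.text == "x"

fetch = make_mocked_coro(return_value=42)
assert loop.run_until_complete(fetch()) == 42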

View File

@ -1,48 +1,47 @@
from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Callable, Type, Union
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
import attr
from multidict import CIMultiDict # noqa
from multidict import CIMultiDict
from yarl import URL
from .client_reqrep import ClientResponse
from .signals import Signal
if TYPE_CHECKING: # pragma: no cover
from .client import ClientSession # noqa
from typing_extensions import Protocol
_SignalArgs = Union[
'TraceRequestStartParams',
'TraceRequestEndParams',
'TraceRequestExceptionParams',
'TraceConnectionQueuedStartParams',
'TraceConnectionQueuedEndParams',
'TraceConnectionCreateStartParams',
'TraceConnectionCreateEndParams',
'TraceConnectionReuseconnParams',
'TraceDnsResolveHostStartParams',
'TraceDnsResolveHostEndParams',
'TraceDnsCacheHitParams',
'TraceDnsCacheMissParams',
'TraceRequestRedirectParams',
'TraceRequestChunkSentParams',
'TraceResponseChunkReceivedParams',
]
_Signal = Signal[Callable[[ClientSession, SimpleNamespace, _SignalArgs],
Awaitable[None]]]
else:
_Signal = Signal
from .client import ClientSession
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
class _SignalCallback(Protocol[_ParamT_contra]):
def __call__(
self,
__client_session: ClientSession,
__trace_config_ctx: SimpleNamespace,
__params: _ParamT_contra,
) -> Awaitable[None]:
...
__all__ = (
'TraceConfig', 'TraceRequestStartParams', 'TraceRequestEndParams',
'TraceRequestExceptionParams', 'TraceConnectionQueuedStartParams',
'TraceConnectionQueuedEndParams', 'TraceConnectionCreateStartParams',
'TraceConnectionCreateEndParams', 'TraceConnectionReuseconnParams',
'TraceDnsResolveHostStartParams', 'TraceDnsResolveHostEndParams',
'TraceDnsCacheHitParams', 'TraceDnsCacheMissParams',
'TraceRequestRedirectParams',
'TraceRequestChunkSentParams', 'TraceResponseChunkReceivedParams',
"TraceConfig",
"TraceRequestStartParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionCreateEndParams",
"TraceConnectionReuseconnParams",
"TraceDnsResolveHostStartParams",
"TraceDnsResolveHostEndParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceRequestRedirectParams",
"TraceRequestChunkSentParams",
"TraceResponseChunkReceivedParams",
)
@ -51,34 +50,61 @@ class TraceConfig:
objects."""
def __init__(
self,
trace_config_ctx_factory: Type[SimpleNamespace]=SimpleNamespace
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
self._on_request_start = Signal(self) # type: _Signal
self._on_request_chunk_sent = Signal(self) # type: _Signal
self._on_response_chunk_received = Signal(self) # type: _Signal
self._on_request_end = Signal(self) # type: _Signal
self._on_request_exception = Signal(self) # type: _Signal
self._on_request_redirect = Signal(self) # type: _Signal
self._on_connection_queued_start = Signal(self) # type: _Signal
self._on_connection_queued_end = Signal(self) # type: _Signal
self._on_connection_create_start = Signal(self) # type: _Signal
self._on_connection_create_end = Signal(self) # type: _Signal
self._on_connection_reuseconn = Signal(self) # type: _Signal
self._on_dns_resolvehost_start = Signal(self) # type: _Signal
self._on_dns_resolvehost_end = Signal(self) # type: _Signal
self._on_dns_cache_hit = Signal(self) # type: _Signal
self._on_dns_cache_miss = Signal(self) # type: _Signal
self._on_request_start = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestStartParams]]
self._on_request_chunk_sent = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
self._on_response_chunk_received = Signal(
self
) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
self._on_request_end = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestEndParams]]
self._on_request_exception = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
self._on_request_redirect = Signal(
self
) # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
self._on_connection_queued_start = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
self._on_connection_queued_end = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
self._on_connection_create_start = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
self._on_connection_create_end = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
self._on_connection_reuseconn = Signal(
self
) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
self._on_dns_resolvehost_start = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
self._on_dns_resolvehost_end = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
self._on_dns_cache_hit = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
self._on_dns_cache_miss = Signal(
self
) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
self._trace_config_ctx_factory = trace_config_ctx_factory # type: Type[SimpleNamespace] # noqa
self._trace_config_ctx_factory = trace_config_ctx_factory
def trace_config_ctx(
self,
trace_request_ctx: SimpleNamespace=None
) -> SimpleNamespace: # noqa
self, trace_request_ctx: Optional[SimpleNamespace] = None
) -> SimpleNamespace:
""" Return a new trace_config_ctx instance """
return self._trace_config_ctx_factory(
trace_request_ctx=trace_request_ctx)
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
def freeze(self) -> None:
self._on_request_start.freeze()
@ -98,290 +124,319 @@ class TraceConfig:
self._on_dns_cache_miss.freeze()
@property
def on_request_start(self) -> _Signal:
def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
return self._on_request_start
@property
def on_request_chunk_sent(self) -> _Signal:
def on_request_chunk_sent(
self,
) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
return self._on_request_chunk_sent
@property
def on_response_chunk_received(self) -> _Signal:
def on_response_chunk_received(
self,
) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
return self._on_response_chunk_received
@property
def on_request_end(self) -> _Signal:
def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
return self._on_request_end
@property
def on_request_exception(self) -> _Signal:
def on_request_exception(
self,
) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
return self._on_request_exception
@property
def on_request_redirect(self) -> _Signal:
def on_request_redirect(
self,
) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
return self._on_request_redirect
@property
def on_connection_queued_start(self) -> _Signal:
def on_connection_queued_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
return self._on_connection_queued_start
@property
def on_connection_queued_end(self) -> _Signal:
def on_connection_queued_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
return self._on_connection_queued_end
@property
def on_connection_create_start(self) -> _Signal:
def on_connection_create_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
return self._on_connection_create_start
@property
def on_connection_create_end(self) -> _Signal:
def on_connection_create_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
return self._on_connection_create_end
@property
def on_connection_reuseconn(self) -> _Signal:
def on_connection_reuseconn(
self,
) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
return self._on_connection_reuseconn
@property
def on_dns_resolvehost_start(self) -> _Signal:
def on_dns_resolvehost_start(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
return self._on_dns_resolvehost_start
@property
def on_dns_resolvehost_end(self) -> _Signal:
def on_dns_resolvehost_end(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
return self._on_dns_resolvehost_end
@property
def on_dns_cache_hit(self) -> _Signal:
def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
return self._on_dns_cache_hit
@property
def on_dns_cache_miss(self) -> _Signal:
def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
return self._on_dns_cache_miss
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
""" Parameters sent by the `on_request_start` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
method: str
url: URL
headers: "CIMultiDict[str]"
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
""" Parameters sent by the `on_request_chunk_sent` signal"""
chunk = attr.ib(type=bytes)
method: str
url: URL
chunk: bytes
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
""" Parameters sent by the `on_response_chunk_received` signal"""
chunk = attr.ib(type=bytes)
method: str
url: URL
chunk: bytes
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
""" Parameters sent by the `on_request_end` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
response = attr.ib(type=ClientResponse)
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
""" Parameters sent by the `on_request_exception` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
exception = attr.ib(type=BaseException)
method: str
url: URL
headers: "CIMultiDict[str]"
exception: BaseException
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
""" Parameters sent by the `on_request_redirect` signal"""
method = attr.ib(type=str)
url = attr.ib(type=URL)
headers = attr.ib(type='CIMultiDict[str]')
response = attr.ib(type=ClientResponse)
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
""" Parameters sent by the `on_connection_queued_start` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
""" Parameters sent by the `on_connection_queued_end` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
""" Parameters sent by the `on_connection_create_start` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
""" Parameters sent by the `on_connection_create_end` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
""" Parameters sent by the `on_connection_reuseconn` signal"""
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
""" Parameters sent by the `on_dns_resolvehost_start` signal"""
host = attr.ib(type=str)
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
""" Parameters sent by the `on_dns_resolvehost_end` signal"""
host = attr.ib(type=str)
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
""" Parameters sent by the `on_dns_cache_hit` signal"""
host = attr.ib(type=str)
host: str
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
""" Parameters sent by the `on_dns_cache_miss` signal"""
host = attr.ib(type=str)
host: str
class Trace:
""" Internal class used to keep together the main dependencies used
"""Internal class used to keep together the main dependencies used
at the moment of sending a signal."""
def __init__(self,
session: 'ClientSession',
trace_config: TraceConfig,
trace_config_ctx: SimpleNamespace) -> None:
def __init__(
self,
session: "ClientSession",
trace_config: TraceConfig,
trace_config_ctx: SimpleNamespace,
) -> None:
self._trace_config = trace_config
self._trace_config_ctx = trace_config_ctx
self._session = session
async def send_request_start(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]') -> None:
async def send_request_start(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
return await self._trace_config.on_request_start.send(
self._session,
self._trace_config_ctx,
TraceRequestStartParams(method, url, headers)
TraceRequestStartParams(method, url, headers),
)
async def send_request_chunk_sent(self, chunk: bytes) -> None:
async def send_request_chunk_sent(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_request_chunk_sent.send(
self._session,
self._trace_config_ctx,
TraceRequestChunkSentParams(chunk)
TraceRequestChunkSentParams(method, url, chunk),
)
async def send_response_chunk_received(self, chunk: bytes) -> None:
async def send_response_chunk_received(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_response_chunk_received.send(
self._session,
self._trace_config_ctx,
TraceResponseChunkReceivedParams(chunk)
TraceResponseChunkReceivedParams(method, url, chunk),
)
async def send_request_end(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
response: ClientResponse) -> None:
async def send_request_end(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
return await self._trace_config.on_request_end.send(
self._session,
self._trace_config_ctx,
TraceRequestEndParams(method, url, headers, response)
TraceRequestEndParams(method, url, headers, response),
)
async def send_request_exception(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
exception: BaseException) -> None:
async def send_request_exception(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
exception: BaseException,
) -> None:
return await self._trace_config.on_request_exception.send(
self._session,
self._trace_config_ctx,
TraceRequestExceptionParams(method, url, headers, exception)
TraceRequestExceptionParams(method, url, headers, exception),
)
async def send_request_redirect(self,
method: str,
url: URL,
headers: 'CIMultiDict[str]',
response: ClientResponse) -> None:
async def send_request_redirect(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
return await self._trace_config._on_request_redirect.send(
self._session,
self._trace_config_ctx,
TraceRequestRedirectParams(method, url, headers, response)
TraceRequestRedirectParams(method, url, headers, response),
)
async def send_connection_queued_start(self) -> None:
return await self._trace_config.on_connection_queued_start.send(
self._session,
self._trace_config_ctx,
TraceConnectionQueuedStartParams()
self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
)
async def send_connection_queued_end(self) -> None:
return await self._trace_config.on_connection_queued_end.send(
self._session,
self._trace_config_ctx,
TraceConnectionQueuedEndParams()
self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
)
async def send_connection_create_start(self) -> None:
return await self._trace_config.on_connection_create_start.send(
self._session,
self._trace_config_ctx,
TraceConnectionCreateStartParams()
self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
)
async def send_connection_create_end(self) -> None:
return await self._trace_config.on_connection_create_end.send(
self._session,
self._trace_config_ctx,
TraceConnectionCreateEndParams()
self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
)
async def send_connection_reuseconn(self) -> None:
return await self._trace_config.on_connection_reuseconn.send(
self._session,
self._trace_config_ctx,
TraceConnectionReuseconnParams()
self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
)
async def send_dns_resolvehost_start(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_start.send(
self._session,
self._trace_config_ctx,
TraceDnsResolveHostStartParams(host)
self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
)
async def send_dns_resolvehost_end(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_end.send(
self._session,
self._trace_config_ctx,
TraceDnsResolveHostEndParams(host)
self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
)
async def send_dns_cache_hit(self, host: str) -> None:
return await self._trace_config.on_dns_cache_hit.send(
self._session,
self._trace_config_ctx,
TraceDnsCacheHitParams(host)
self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
)
async def send_dns_cache_miss(self, host: str) -> None:
return await self._trace_config.on_dns_cache_miss.send(
self._session,
self._trace_config_ctx,
TraceDnsCacheMissParams(host)
self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
)
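The send_* methods above fire the signals that client code subscribes to; the consumer-side pattern looks like this (the URL is a placeholder):

import asyncio

import aiohttp

async def on_request_start(session, trace_config_ctx, params):
    # params is a TraceRequestStartParams(method, url, headers)
    print("request start:", params.method, params.url)

async def main():
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("http://example.com/") as resp:
            await resp.read()

asyncio.get_event_loop().run_until_complete(main())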

View File

@ -1,24 +1,10 @@
import json
import os # noqa
import pathlib # noqa
import os
import pathlib
import sys
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterable,
Mapping,
Tuple,
Union,
)
from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Tuple, Union
from multidict import (
CIMultiDict,
CIMultiDictProxy,
MultiDict,
MultiDictProxy,
istr,
)
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL
DEFAULT_JSON_ENCODER = json.dumps
@ -29,7 +15,7 @@ if TYPE_CHECKING: # pragma: no cover
_CIMultiDictProxy = CIMultiDictProxy[str]
_MultiDict = MultiDict[str]
_MultiDictProxy = MultiDictProxy[str]
from http.cookies import BaseCookie # noqa
from http.cookies import BaseCookie, Morsel
else:
_CIMultiDict = CIMultiDict
_CIMultiDictProxy = CIMultiDictProxy
@ -39,15 +25,22 @@ else:
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict,
_CIMultiDictProxy]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]
LooseCookies = Union[Iterable[Tuple[str, 'BaseCookie[str]']],
Mapping[str, 'BaseCookie[str]'], 'BaseCookie[str]']
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
LooseCookiesMappings,
LooseCookiesIterables,
"BaseCookie[str]",
]
if sys.version_info >= (3, 6):
PathLike = Union[str, 'os.PathLike[str]']
PathLike = Union[str, "os.PathLike[str]"]
else:
PathLike = Union[str, pathlib.PurePath]
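These aliases exist purely for annotations; a sketch of how one reads at a call site (the function is hypothetical):

from multidict import CIMultiDict

from aiohttp.typedefs import LooseHeaders

def count_headers(headers: LooseHeaders) -> int:
    # accepts a plain mapping, a CIMultiDict, or a CIMultiDictProxy
    return len(headers)

count_headers({"Accept": "application/json"})
count_headers(CIMultiDict(Accept="application/json"))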

View File

@ -5,268 +5,274 @@ import sys
from argparse import ArgumentParser
from collections.abc import Iterable
from importlib import import_module
from typing import Any, Awaitable, Callable, List, Optional, Type, Union, cast
from typing import (
Any as Any,
Awaitable as Awaitable,
Callable as Callable,
Iterable as TypingIterable,
List as List,
Optional as Optional,
Set as Set,
Type as Type,
Union as Union,
cast as cast,
)
from .abc import AbstractAccessLogger
from .helpers import all_tasks
from .log import access_logger
from .web_app import Application as Application
from .web_app import CleanupError as CleanupError
from .web_exceptions import HTTPAccepted as HTTPAccepted
from .web_exceptions import HTTPBadGateway as HTTPBadGateway
from .web_exceptions import HTTPBadRequest as HTTPBadRequest
from .web_exceptions import HTTPClientError as HTTPClientError
from .web_exceptions import HTTPConflict as HTTPConflict
from .web_exceptions import HTTPCreated as HTTPCreated
from .web_exceptions import HTTPError as HTTPError
from .web_exceptions import HTTPException as HTTPException
from .web_exceptions import HTTPExpectationFailed as HTTPExpectationFailed
from .web_exceptions import HTTPFailedDependency as HTTPFailedDependency
from .web_exceptions import HTTPForbidden as HTTPForbidden
from .web_exceptions import HTTPFound as HTTPFound
from .web_exceptions import HTTPGatewayTimeout as HTTPGatewayTimeout
from .web_exceptions import HTTPGone as HTTPGone
from .web_exceptions import HTTPInsufficientStorage as HTTPInsufficientStorage
from .web_exceptions import HTTPInternalServerError as HTTPInternalServerError
from .web_exceptions import HTTPLengthRequired as HTTPLengthRequired
from .web_exceptions import HTTPMethodNotAllowed as HTTPMethodNotAllowed
from .web_exceptions import HTTPMisdirectedRequest as HTTPMisdirectedRequest
from .web_exceptions import HTTPMovedPermanently as HTTPMovedPermanently
from .web_exceptions import HTTPMultipleChoices as HTTPMultipleChoices
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
HTTPAccepted as HTTPAccepted,
HTTPBadGateway as HTTPBadGateway,
HTTPBadRequest as HTTPBadRequest,
HTTPClientError as HTTPClientError,
HTTPConflict as HTTPConflict,
HTTPCreated as HTTPCreated,
HTTPError as HTTPError,
HTTPException as HTTPException,
HTTPExpectationFailed as HTTPExpectationFailed,
HTTPFailedDependency as HTTPFailedDependency,
HTTPForbidden as HTTPForbidden,
HTTPFound as HTTPFound,
HTTPGatewayTimeout as HTTPGatewayTimeout,
HTTPGone as HTTPGone,
HTTPInsufficientStorage as HTTPInsufficientStorage,
HTTPInternalServerError as HTTPInternalServerError,
HTTPLengthRequired as HTTPLengthRequired,
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
HTTPMovedPermanently as HTTPMovedPermanently,
HTTPMultipleChoices as HTTPMultipleChoices,
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
)
from .web_exceptions import HTTPNoContent as HTTPNoContent
from .web_exceptions import (
HTTPNoContent as HTTPNoContent,
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
)
from .web_exceptions import HTTPNotAcceptable as HTTPNotAcceptable
from .web_exceptions import HTTPNotExtended as HTTPNotExtended
from .web_exceptions import HTTPNotFound as HTTPNotFound
from .web_exceptions import HTTPNotImplemented as HTTPNotImplemented
from .web_exceptions import HTTPNotModified as HTTPNotModified
from .web_exceptions import HTTPOk as HTTPOk
from .web_exceptions import HTTPPartialContent as HTTPPartialContent
from .web_exceptions import HTTPPaymentRequired as HTTPPaymentRequired
from .web_exceptions import HTTPPermanentRedirect as HTTPPermanentRedirect
from .web_exceptions import HTTPPreconditionFailed as HTTPPreconditionFailed
from .web_exceptions import (
HTTPNotAcceptable as HTTPNotAcceptable,
HTTPNotExtended as HTTPNotExtended,
HTTPNotFound as HTTPNotFound,
HTTPNotImplemented as HTTPNotImplemented,
HTTPNotModified as HTTPNotModified,
HTTPOk as HTTPOk,
HTTPPartialContent as HTTPPartialContent,
HTTPPaymentRequired as HTTPPaymentRequired,
HTTPPermanentRedirect as HTTPPermanentRedirect,
HTTPPreconditionFailed as HTTPPreconditionFailed,
HTTPPreconditionRequired as HTTPPreconditionRequired,
)
from .web_exceptions import (
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
)
from .web_exceptions import HTTPRedirection as HTTPRedirection
from .web_exceptions import (
HTTPRedirection as HTTPRedirection,
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
)
from .web_exceptions import (
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
)
from .web_exceptions import (
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
)
from .web_exceptions import HTTPRequestTimeout as HTTPRequestTimeout
from .web_exceptions import HTTPRequestURITooLong as HTTPRequestURITooLong
from .web_exceptions import HTTPResetContent as HTTPResetContent
from .web_exceptions import HTTPSeeOther as HTTPSeeOther
from .web_exceptions import HTTPServerError as HTTPServerError
from .web_exceptions import HTTPServiceUnavailable as HTTPServiceUnavailable
from .web_exceptions import HTTPSuccessful as HTTPSuccessful
from .web_exceptions import HTTPTemporaryRedirect as HTTPTemporaryRedirect
from .web_exceptions import HTTPTooManyRequests as HTTPTooManyRequests
from .web_exceptions import HTTPUnauthorized as HTTPUnauthorized
from .web_exceptions import (
HTTPRequestTimeout as HTTPRequestTimeout,
HTTPRequestURITooLong as HTTPRequestURITooLong,
HTTPResetContent as HTTPResetContent,
HTTPSeeOther as HTTPSeeOther,
HTTPServerError as HTTPServerError,
HTTPServiceUnavailable as HTTPServiceUnavailable,
HTTPSuccessful as HTTPSuccessful,
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
HTTPTooManyRequests as HTTPTooManyRequests,
HTTPUnauthorized as HTTPUnauthorized,
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
)
from .web_exceptions import HTTPUnprocessableEntity as HTTPUnprocessableEntity
from .web_exceptions import (
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
)
from .web_exceptions import HTTPUpgradeRequired as HTTPUpgradeRequired
from .web_exceptions import HTTPUseProxy as HTTPUseProxy
from .web_exceptions import (
HTTPUpgradeRequired as HTTPUpgradeRequired,
HTTPUseProxy as HTTPUseProxy,
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
HTTPVersionNotSupported as HTTPVersionNotSupported,
)
from .web_exceptions import HTTPVersionNotSupported as HTTPVersionNotSupported
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
from .web_middlewares import middleware as middleware
from .web_middlewares import (
middleware as middleware,
normalize_path_middleware as normalize_path_middleware,
)
from .web_protocol import PayloadAccessError as PayloadAccessError
from .web_protocol import RequestHandler as RequestHandler
from .web_protocol import RequestPayloadError as RequestPayloadError
from .web_request import BaseRequest as BaseRequest
from .web_request import FileField as FileField
from .web_request import Request as Request
from .web_response import ContentCoding as ContentCoding
from .web_response import Response as Response
from .web_response import StreamResponse as StreamResponse
from .web_response import json_response as json_response
from .web_routedef import AbstractRouteDef as AbstractRouteDef
from .web_routedef import RouteDef as RouteDef
from .web_routedef import RouteTableDef as RouteTableDef
from .web_routedef import StaticDef as StaticDef
from .web_routedef import delete as delete
from .web_routedef import get as get
from .web_routedef import head as head
from .web_routedef import options as options
from .web_routedef import patch as patch
from .web_routedef import post as post
from .web_routedef import put as put
from .web_routedef import route as route
from .web_routedef import static as static
from .web_routedef import view as view
from .web_runner import AppRunner as AppRunner
from .web_runner import BaseRunner as BaseRunner
from .web_runner import BaseSite as BaseSite
from .web_runner import GracefulExit as GracefulExit
from .web_runner import NamedPipeSite as NamedPipeSite
from .web_runner import ServerRunner as ServerRunner
from .web_runner import SockSite as SockSite
from .web_runner import TCPSite as TCPSite
from .web_runner import UnixSite as UnixSite
from .web_protocol import (
PayloadAccessError as PayloadAccessError,
RequestHandler as RequestHandler,
RequestPayloadError as RequestPayloadError,
)
from .web_request import (
BaseRequest as BaseRequest,
FileField as FileField,
Request as Request,
)
from .web_response import (
ContentCoding as ContentCoding,
Response as Response,
StreamResponse as StreamResponse,
json_response as json_response,
)
from .web_routedef import (
AbstractRouteDef as AbstractRouteDef,
RouteDef as RouteDef,
RouteTableDef as RouteTableDef,
StaticDef as StaticDef,
delete as delete,
get as get,
head as head,
options as options,
patch as patch,
post as post,
put as put,
route as route,
static as static,
view as view,
)
from .web_runner import (
AppRunner as AppRunner,
BaseRunner as BaseRunner,
BaseSite as BaseSite,
GracefulExit as GracefulExit,
NamedPipeSite as NamedPipeSite,
ServerRunner as ServerRunner,
SockSite as SockSite,
TCPSite as TCPSite,
UnixSite as UnixSite,
)
from .web_server import Server as Server
from .web_urldispatcher import AbstractResource as AbstractResource
from .web_urldispatcher import AbstractRoute as AbstractRoute
from .web_urldispatcher import DynamicResource as DynamicResource
from .web_urldispatcher import PlainResource as PlainResource
from .web_urldispatcher import Resource as Resource
from .web_urldispatcher import ResourceRoute as ResourceRoute
from .web_urldispatcher import StaticResource as StaticResource
from .web_urldispatcher import UrlDispatcher as UrlDispatcher
from .web_urldispatcher import UrlMappingMatchInfo as UrlMappingMatchInfo
from .web_urldispatcher import View as View
from .web_ws import WebSocketReady as WebSocketReady
from .web_ws import WebSocketResponse as WebSocketResponse
from .web_ws import WSMsgType as WSMsgType
from .web_urldispatcher import (
AbstractResource as AbstractResource,
AbstractRoute as AbstractRoute,
DynamicResource as DynamicResource,
PlainResource as PlainResource,
Resource as Resource,
ResourceRoute as ResourceRoute,
StaticResource as StaticResource,
UrlDispatcher as UrlDispatcher,
UrlMappingMatchInfo as UrlMappingMatchInfo,
View as View,
)
from .web_ws import (
WebSocketReady as WebSocketReady,
WebSocketResponse as WebSocketResponse,
WSMsgType as WSMsgType,
)
__all__ = (
# web_app
'Application',
'CleanupError',
"Application",
"CleanupError",
# web_exceptions
'HTTPAccepted',
'HTTPBadGateway',
'HTTPBadRequest',
'HTTPClientError',
'HTTPConflict',
'HTTPCreated',
'HTTPError',
'HTTPException',
'HTTPExpectationFailed',
'HTTPFailedDependency',
'HTTPForbidden',
'HTTPFound',
'HTTPGatewayTimeout',
'HTTPGone',
'HTTPInsufficientStorage',
'HTTPInternalServerError',
'HTTPLengthRequired',
'HTTPMethodNotAllowed',
'HTTPMisdirectedRequest',
'HTTPMovedPermanently',
'HTTPMultipleChoices',
'HTTPNetworkAuthenticationRequired',
'HTTPNoContent',
'HTTPNonAuthoritativeInformation',
'HTTPNotAcceptable',
'HTTPNotExtended',
'HTTPNotFound',
'HTTPNotImplemented',
'HTTPNotModified',
'HTTPOk',
'HTTPPartialContent',
'HTTPPaymentRequired',
'HTTPPermanentRedirect',
'HTTPPreconditionFailed',
'HTTPPreconditionRequired',
'HTTPProxyAuthenticationRequired',
'HTTPRedirection',
'HTTPRequestEntityTooLarge',
'HTTPRequestHeaderFieldsTooLarge',
'HTTPRequestRangeNotSatisfiable',
'HTTPRequestTimeout',
'HTTPRequestURITooLong',
'HTTPResetContent',
'HTTPSeeOther',
'HTTPServerError',
'HTTPServiceUnavailable',
'HTTPSuccessful',
'HTTPTemporaryRedirect',
'HTTPTooManyRequests',
'HTTPUnauthorized',
'HTTPUnavailableForLegalReasons',
'HTTPUnprocessableEntity',
'HTTPUnsupportedMediaType',
'HTTPUpgradeRequired',
'HTTPUseProxy',
'HTTPVariantAlsoNegotiates',
'HTTPVersionNotSupported',
"HTTPAccepted",
"HTTPBadGateway",
"HTTPBadRequest",
"HTTPClientError",
"HTTPConflict",
"HTTPCreated",
"HTTPError",
"HTTPException",
"HTTPExpectationFailed",
"HTTPFailedDependency",
"HTTPForbidden",
"HTTPFound",
"HTTPGatewayTimeout",
"HTTPGone",
"HTTPInsufficientStorage",
"HTTPInternalServerError",
"HTTPLengthRequired",
"HTTPMethodNotAllowed",
"HTTPMisdirectedRequest",
"HTTPMovedPermanently",
"HTTPMultipleChoices",
"HTTPNetworkAuthenticationRequired",
"HTTPNoContent",
"HTTPNonAuthoritativeInformation",
"HTTPNotAcceptable",
"HTTPNotExtended",
"HTTPNotFound",
"HTTPNotImplemented",
"HTTPNotModified",
"HTTPOk",
"HTTPPartialContent",
"HTTPPaymentRequired",
"HTTPPermanentRedirect",
"HTTPPreconditionFailed",
"HTTPPreconditionRequired",
"HTTPProxyAuthenticationRequired",
"HTTPRedirection",
"HTTPRequestEntityTooLarge",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPRequestRangeNotSatisfiable",
"HTTPRequestTimeout",
"HTTPRequestURITooLong",
"HTTPResetContent",
"HTTPSeeOther",
"HTTPServerError",
"HTTPServiceUnavailable",
"HTTPSuccessful",
"HTTPTemporaryRedirect",
"HTTPTooManyRequests",
"HTTPUnauthorized",
"HTTPUnavailableForLegalReasons",
"HTTPUnprocessableEntity",
"HTTPUnsupportedMediaType",
"HTTPUpgradeRequired",
"HTTPUseProxy",
"HTTPVariantAlsoNegotiates",
"HTTPVersionNotSupported",
# web_fileresponse
'FileResponse',
"FileResponse",
# web_middlewares
'middleware',
'normalize_path_middleware',
"middleware",
"normalize_path_middleware",
# web_protocol
'PayloadAccessError',
'RequestHandler',
'RequestPayloadError',
"PayloadAccessError",
"RequestHandler",
"RequestPayloadError",
# web_request
'BaseRequest',
'FileField',
'Request',
"BaseRequest",
"FileField",
"Request",
# web_response
'ContentCoding',
'Response',
'StreamResponse',
'json_response',
"ContentCoding",
"Response",
"StreamResponse",
"json_response",
# web_routedef
'AbstractRouteDef',
'RouteDef',
'RouteTableDef',
'StaticDef',
'delete',
'get',
'head',
'options',
'patch',
'post',
'put',
'route',
'static',
'view',
"AbstractRouteDef",
"RouteDef",
"RouteTableDef",
"StaticDef",
"delete",
"get",
"head",
"options",
"patch",
"post",
"put",
"route",
"static",
"view",
# web_runner
'AppRunner',
'BaseRunner',
'BaseSite',
'GracefulExit',
'ServerRunner',
'SockSite',
'TCPSite',
'UnixSite',
'NamedPipeSite',
"AppRunner",
"BaseRunner",
"BaseSite",
"GracefulExit",
"ServerRunner",
"SockSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
# web_server
'Server',
"Server",
# web_urldispatcher
'AbstractResource',
'AbstractRoute',
'DynamicResource',
'PlainResource',
'Resource',
'ResourceRoute',
'StaticResource',
'UrlDispatcher',
'UrlMappingMatchInfo',
'View',
"AbstractResource",
"AbstractRoute",
"DynamicResource",
"PlainResource",
"Resource",
"ResourceRoute",
"StaticResource",
"UrlDispatcher",
"UrlMappingMatchInfo",
"View",
# web_ws
'WebSocketReady',
'WebSocketResponse',
'WSMsgType',
"WebSocketReady",
"WebSocketResponse",
"WSMsgType",
# web
'run_app',
"run_app",
)
@ -275,32 +281,40 @@ try:
except ImportError: # pragma: no cover
SSLContext = Any # type: ignore
HostSequence = TypingIterable[str]
async def _run_app(app: Union[Application, Awaitable[Application]], *,
host: Optional[str]=None,
port: Optional[int]=None,
path: Optional[str]=None,
sock: Optional[socket.socket]=None,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
print: Callable[..., None]=print,
backlog: int=128,
access_log_class: Type[AbstractAccessLogger]=AccessLogger,
access_log_format: str=AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger]=access_logger,
handle_signals: bool=True,
reuse_address: Optional[bool]=None,
reuse_port: Optional[bool]=None) -> None:
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
# An internal function that actually does all the dirty work of running the application
if asyncio.iscoroutine(app):
app = await app # type: ignore
app = cast(Application, app)
runner = AppRunner(app, handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log)
runner = AppRunner(
app,
handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
)
await runner.setup()
@ -309,67 +323,117 @@ async def _run_app(app: Union[Application, Awaitable[Application]], *,
try:
if host is not None:
if isinstance(host, (str, bytes, bytearray, memoryview)):
sites.append(TCPSite(runner, host, port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
host,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
else:
for h in host:
sites.append(TCPSite(runner, h, port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
h,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
elif path is None and sock is None or port is not None:
sites.append(TCPSite(runner, port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port))
sites.append(
TCPSite(
runner,
port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
if path is not None:
if isinstance(path, (str, bytes, bytearray, memoryview)):
sites.append(UnixSite(runner, path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
UnixSite(
runner,
path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for p in path:
sites.append(UnixSite(runner, p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
UnixSite(
runner,
p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
if sock is not None:
if not isinstance(sock, Iterable):
sites.append(SockSite(runner, sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
SockSite(
runner,
sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for s in sock:
sites.append(SockSite(runner, s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog))
sites.append(
SockSite(
runner,
s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
for site in sites:
await site.start()
if print: # pragma: no branch
names = sorted(str(s.name) for s in runner.sites)
print("======== Running on {} ========\n"
"(Press CTRL+C to quit)".format(', '.join(names)))
print(
"======== Running on {} ========\n"
"(Press CTRL+C to quit)".format(", ".join(names))
)
# sleep forever by 1 hour intervals,
# on Windows before Python 3.8 wake up every 1 second to handle
# Ctrl+C smoothly
if sys.platform == "win32" and sys.version_info < (3, 8):
delay = 1
else:
delay = 3600
while True:
await asyncio.sleep(3600) # sleep forever by 1 hour intervals
await asyncio.sleep(delay)
finally:
await runner.cleanup()
def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None:
to_cancel = all_tasks(loop)
def _cancel_tasks(
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
) -> None:
if not to_cancel:
return
@ -377,64 +441,76 @@ def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None:
task.cancel()
loop.run_until_complete(
asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))
asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
)
for task in to_cancel:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler({
'message': 'unhandled exception during asyncio.run() shutdown',
'exception': task.exception(),
'task': task,
})
loop.call_exception_handler(
{
"message": "unhandled exception during asyncio.run() shutdown",
"exception": task.exception(),
"task": task,
}
)
def run_app(app: Union[Application, Awaitable[Application]], *,
host: Optional[str]=None,
port: Optional[int]=None,
path: Optional[str]=None,
sock: Optional[socket.socket]=None,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
print: Callable[..., None]=print,
backlog: int=128,
access_log_class: Type[AbstractAccessLogger]=AccessLogger,
access_log_format: str=AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger]=access_logger,
handle_signals: bool=True,
reuse_address: Optional[bool]=None,
reuse_port: Optional[bool]=None) -> None:
def run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[socket.socket] = None,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
"""Run an app locally"""
loop = asyncio.get_event_loop()
# Configure if and only if in debugging mode and using the default logger
if loop.get_debug() and access_log and access_log.name == 'aiohttp.access':
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
if access_log.level == logging.NOTSET:
access_log.setLevel(logging.DEBUG)
if not access_log.hasHandlers():
access_log.addHandler(logging.StreamHandler())
try:
loop.run_until_complete(_run_app(app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port))
main_task = loop.create_task(
_run_app(
app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
loop.run_until_complete(main_task)
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
pass
finally:
_cancel_all_tasks(loop)
_cancel_tasks({main_task}, loop)
_cancel_tasks(all_tasks(loop), loop)
if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
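For reference, the everyday call into run_app that all of the above supports (handler and route are illustrative):

from aiohttp import web

async def index(request):
    return web.Response(text="hello")

app = web.Application()
app.router.add_get("/", index)

if __name__ == "__main__":
    web.run_app(app, host="127.0.0.1", port=8080)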
@ -442,54 +518,57 @@ def run_app(app: Union[Application, Awaitable[Application]], *,
def main(argv: List[str]) -> None:
arg_parser = ArgumentParser(
description="aiohttp.web Application server",
prog="aiohttp.web"
description="aiohttp.web Application server", prog="aiohttp.web"
)
arg_parser.add_argument(
"entry_func",
help=("Callable returning the `aiohttp.web.Application` instance to "
"run. Should be specified in the 'module:function' syntax."),
metavar="entry-func"
help=(
"Callable returning the `aiohttp.web.Application` instance to "
"run. Should be specified in the 'module:function' syntax."
),
metavar="entry-func",
)
arg_parser.add_argument(
"-H", "--hostname",
"-H",
"--hostname",
help="TCP/IP hostname to serve on (default: %(default)r)",
default="localhost"
default="localhost",
)
arg_parser.add_argument(
"-P", "--port",
"-P",
"--port",
help="TCP/IP port to serve on (default: %(default)r)",
type=int,
default="8080"
default="8080",
)
arg_parser.add_argument(
"-U", "--path",
"-U",
"--path",
help="Unix file system path to serve on. Specifying a path will cause "
"hostname and port arguments to be ignored.",
"hostname and port arguments to be ignored.",
)
args, extra_argv = arg_parser.parse_known_args(argv)
# Import logic
mod_str, _, func_str = args.entry_func.partition(":")
if not func_str or not mod_str:
arg_parser.error("'entry-func' not in 'module:function' syntax")
if mod_str.startswith("."):
arg_parser.error("relative module names not supported")
try:
module = import_module(mod_str)
except ImportError as ex:
arg_parser.error("unable to import %s: %s" % (mod_str, ex))
arg_parser.error(f"unable to import {mod_str}: {ex}")
try:
func = getattr(module, func_str)
except AttributeError:
arg_parser.error("module %r has no attribute %r" % (mod_str, func_str))
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
# Compatibility logic
if args.path is not None and not hasattr(socket, "AF_UNIX"):
arg_parser.error(
"file system paths not supported by your operating" " environment"
)
logging.basicConfig(level=logging.DEBUG)
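As a hedged illustration of the module:function entry point this CLI parses (the module name and contents are invented for the example):

# my_service.py -- run with:  python -m aiohttp.web -H localhost -P 8080 my_service:init
from typing import List

from aiohttp import web

def init(argv: List[str]) -> web.Application:
    # main() imports this module, resolves the attribute after the colon,
    # and calls it with any extra command-line arguments it did not consume.
    app = web.Application()
    return app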


@@ -1,8 +1,8 @@
import asyncio
import logging
import warnings
from functools import partial, update_wrapper
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
@@ -44,6 +44,7 @@ from .web_routedef import AbstractRouteDef
from .web_server import Server
from .web_urldispatcher import (
AbstractResource,
AbstractRoute,
Domain,
MaskDomain,
MatchedSubAppResource,
@@ -51,21 +52,20 @@ from .web_urldispatcher import (
UrlDispatcher,
)
__all__ = ("Application", "CleanupError")
if TYPE_CHECKING: # pragma: no cover
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
_Handler = Callable[[Request], Awaitable[StreamResponse]]
_Middleware = Union[
Callable[[Request, _Handler], Awaitable[StreamResponse]],
Callable[["Application", _Handler], Awaitable[_Handler]], # old-style
]
_Middlewares = FrozenList[_Middleware]
_MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
_Subapps = List["Application"]
else:
# No type checker mode, skip types
_AppSignal = Signal
@@ -78,37 +78,57 @@ else:
class Application(MutableMapping[str, Any]):
ATTRS = frozenset(
[
"logger",
"_debug",
"_router",
"_loop",
"_handler_args",
"_middlewares",
"_middlewares_handlers",
"_run_middlewares",
"_state",
"_frozen",
"_pre_frozen",
"_subapps",
"_on_response_prepare",
"_on_startup",
"_on_shutdown",
"_on_cleanup",
"_client_max_size",
"_cleanup_ctx",
]
)
def __init__(
self,
*,
logger: logging.Logger = web_logger,
router: Optional[UrlDispatcher] = None,
middlewares: Iterable[_Middleware] = (),
handler_args: Optional[Mapping[str, Any]] = None,
client_max_size: int = 1024 ** 2,
loop: Optional[asyncio.AbstractEventLoop] = None,
debug: Any = ..., # mypy doesn't support ellipsis
) -> None:
if router is None:
router = UrlDispatcher()
else:
warnings.warn("router argument is deprecated", DeprecationWarning,
stacklevel=2)
warnings.warn(
"router argument is deprecated", DeprecationWarning, stacklevel=2
)
assert isinstance(router, AbstractRouter), router
if loop is not None:
warnings.warn("loop argument is deprecated", DeprecationWarning,
stacklevel=2)
warnings.warn(
"loop argument is deprecated", DeprecationWarning, stacklevel=2
)
if debug is not ...:
warnings.warn("debug argument is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"debug argument is deprecated", DeprecationWarning, stacklevel=2
)
self._debug = debug
self._router = router # type: UrlDispatcher
self._loop = loop
@@ -136,19 +156,24 @@ class Application(MutableMapping[str, Any]):
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
self._client_max_size = client_max_size
def __init_subclass__(cls: Type["Application"]) -> None:
warnings.warn(
"Inheritance class {} from web.Application "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2,
)
if DEBUG: # pragma: no cover
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn("Setting custom web.Application.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Setting custom web.Application.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
# MutableMapping API
@@ -161,10 +186,11 @@ class Application(MutableMapping[str, Any]):
def _check_frozen(self) -> None:
if self._frozen:
warnings.warn("Changing state of started or joined "
"application is deprecated",
DeprecationWarning,
stacklevel=3)
warnings.warn(
"Changing state of started or joined " "application is deprecated",
DeprecationWarning,
stacklevel=3,
)
def __setitem__(self, key: str, value: Any) -> None:
self._check_frozen()
@@ -186,9 +212,7 @@ class Application(MutableMapping[str, Any]):
# Technically the loop can be None
# but we mask it by explicit type cast
# to provide more convenient type annotation
warnings.warn("loop property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
return cast(asyncio.AbstractEventLoop, self._loop)
def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
@@ -196,7 +220,8 @@ class Application(MutableMapping[str, Any]):
loop = asyncio.get_event_loop()
if self._loop is not None and self._loop is not loop:
raise RuntimeError(
"web.Application instance initialized with different loop")
"web.Application instance initialized with different loop"
)
self._loop = loop
@@ -235,8 +260,7 @@ class Application(MutableMapping[str, Any]):
for subapp in self._subapps:
subapp.pre_freeze()
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
@property
def frozen(self) -> bool:
@@ -253,41 +277,37 @@ class Application(MutableMapping[str, Any]):
@property
def debug(self) -> bool:
warnings.warn("debug property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
return self._debug
def _reg_subapp_signals(self, subapp: "Application") -> None:
def reg_handler(signame: str) -> None:
subsig = getattr(subapp, signame)
async def handler(app: "Application") -> None:
await subsig.send(subapp)
appsig = getattr(self, signame)
appsig.append(handler)
reg_handler("on_startup")
reg_handler("on_shutdown")
reg_handler("on_cleanup")
def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
if not isinstance(prefix, str):
raise TypeError("Prefix must be str")
prefix = prefix.rstrip("/")
if not prefix:
raise ValueError("Prefix cannot be empty")
factory = partial(PrefixedSubAppResource, prefix, subapp)
return self._add_subapp(factory, subapp)
def _add_subapp(
self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
) -> AbstractResource:
if self.frozen:
raise RuntimeError("Cannot add sub application to frozen application")
if subapp.frozen:
raise RuntimeError("Cannot add frozen application")
resource = resource_factory()
@@ -299,19 +319,18 @@ class Application(MutableMapping[str, Any]):
subapp._set_loop(self._loop)
return resource
def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
elif "*" in domain:
rule = MaskDomain(domain) # type: Domain
else:
rule = Domain(domain)
factory = partial(MatchedSubAppResource, rule, subapp)
return self._add_subapp(factory, subapp)
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
return self.router.add_routes(routes)
@property
def on_response_prepare(self) -> _RespPrepareSignal:
@@ -330,7 +349,7 @@ class Application(MutableMapping[str, Any]):
return self._on_cleanup
@property
def cleanup_ctx(self) -> "CleanupContext":
return self._cleanup_ctx
@property
@@ -341,45 +360,53 @@ class Application(MutableMapping[str, Any]):
def middlewares(self) -> _Middlewares:
return self._middlewares
def _make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
if not issubclass(access_log_class, AbstractAccessLogger):
raise TypeError(
"access_log_class must be subclass of "
"aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
)
self._set_loop(loop)
self.freeze()
kwargs["debug"] = self._debug
kwargs["access_log_class"] = access_log_class
if self._handler_args:
for k, v in self._handler_args.items():
kwargs[k] = v
return Server(
self._handle, # type: ignore
request_factory=self._make_request,
loop=self._loop,
**kwargs,
)
def make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
warnings.warn("Application.make_handler(...) is deprecated, "
"use AppRunner API instead",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Application.make_handler(...) is deprecated, " "use AppRunner API instead",
DeprecationWarning,
stacklevel=2,
)
return self._make_handler(
loop=loop, access_log_class=access_log_class, **kwargs
)
async def startup(self) -> None:
"""Causes on_startup signal
@@ -402,25 +429,35 @@
"""
await self.on_cleanup.send(self)
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
_cls: Type[Request] = Request,
) -> Request:
return _cls(
message,
payload,
protocol,
writer,
task,
self._loop,
client_max_size=self._client_max_size,
)
def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
for m in reversed(self._middlewares):
if getattr(m, "__middleware_version__", None) == 1:
yield m, True
else:
warnings.warn(
'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
DeprecationWarning,
stacklevel=2,
)
yield m, False
yield _fix_request_current_app(self), True
@@ -431,8 +468,10 @@ class Application(MutableMapping[str, Any]):
match_info = await self._router.resolve(request)
if debug: # pragma: no cover
if not isinstance(match_info, AbstractMatchInfo):
raise TypeError("match_info should be AbstractMatchInfo "
"instance, not {!r}".format(match_info))
raise TypeError(
"match_info should be AbstractMatchInfo "
"instance, not {!r}".format(match_info)
)
match_info.add_app(self)
match_info.freeze()
@@ -449,9 +488,11 @@ class Application(MutableMapping[str, Any]):
if self._run_middlewares:
for app in match_info.apps[::-1]:
for m, new_style in app._middlewares_handlers: # type: ignore
if new_style:
handler = update_wrapper(
partial(m, handler=handler), handler
)
else:
handler = await m(app, handler) # type: ignore
@@ -459,7 +500,7 @@ class Application(MutableMapping[str, Any]):
return resp
def __call__(self) -> "Application":
"""gunicorn compatibility"""
return self
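Since _prepare_middleware above deprecates old-style middlewares and now wraps new-style handlers with update_wrapper, here is a brief sketch of the new-style form (the timing logic is invented for the example):

import time

from aiohttp import web

@web.middleware
async def timing_middleware(request: web.Request, handler) -> web.StreamResponse:
    # New-style middleware: a coroutine taking (request, handler); the
    # decorator sets the __middleware_version__ = 1 attribute checked above.
    start = time.monotonic()
    response = await handler(request)
    response.headers["X-Elapsed"] = f"{time.monotonic() - start:.6f}"
    return response

app = web.Application(middlewares=[timing_middleware])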
@@ -477,14 +518,12 @@ class CleanupError(RuntimeError):
if TYPE_CHECKING: # pragma: no cover
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
_CleanupContextBase = FrozenList
class CleanupContext(_CleanupContextBase):
def __init__(self) -> None:
super().__init__()
self._exits = [] # type: List[AsyncIterator[None]]
@@ -505,8 +544,7 @@ class CleanupContext(_CleanupContextBase):
except Exception as exc:
errors.append(exc)
else:
errors.append(RuntimeError("{!r} has more than one 'yield'"
.format(it)))
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
if errors:
if len(errors) == 1:
raise errors[0]


@@ -7,63 +7,63 @@ from .typedefs import LooseHeaders, StrOrURL
from .web_response import Response
__all__ = (
"HTTPException",
"HTTPError",
"HTTPRedirection",
"HTTPSuccessful",
"HTTPOk",
"HTTPCreated",
"HTTPAccepted",
"HTTPNonAuthoritativeInformation",
"HTTPNoContent",
"HTTPResetContent",
"HTTPPartialContent",
"HTTPMultipleChoices",
"HTTPMovedPermanently",
"HTTPFound",
"HTTPSeeOther",
"HTTPNotModified",
"HTTPUseProxy",
"HTTPTemporaryRedirect",
"HTTPPermanentRedirect",
"HTTPClientError",
"HTTPBadRequest",
"HTTPUnauthorized",
"HTTPPaymentRequired",
"HTTPForbidden",
"HTTPNotFound",
"HTTPMethodNotAllowed",
"HTTPNotAcceptable",
"HTTPProxyAuthenticationRequired",
"HTTPRequestTimeout",
"HTTPConflict",
"HTTPGone",
"HTTPLengthRequired",
"HTTPPreconditionFailed",
"HTTPRequestEntityTooLarge",
"HTTPRequestURITooLong",
"HTTPUnsupportedMediaType",
"HTTPRequestRangeNotSatisfiable",
"HTTPExpectationFailed",
"HTTPMisdirectedRequest",
"HTTPUnprocessableEntity",
"HTTPFailedDependency",
"HTTPUpgradeRequired",
"HTTPPreconditionRequired",
"HTTPTooManyRequests",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPUnavailableForLegalReasons",
"HTTPServerError",
"HTTPInternalServerError",
"HTTPNotImplemented",
"HTTPBadGateway",
"HTTPServiceUnavailable",
"HTTPGatewayTimeout",
"HTTPVersionNotSupported",
"HTTPVariantAlsoNegotiates",
"HTTPInsufficientStorage",
"HTTPNotExtended",
"HTTPNetworkAuthenticationRequired",
)
@@ -71,6 +71,7 @@ __all__ = (
# HTTP Exceptions
############################################################
class HTTPException(Response, Exception):
# You should set in subclasses:
@@ -81,22 +82,32 @@ class HTTPException(Response, Exception):
__http_exception__ = True
def __init__(
self,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if body is not None:
warnings.warn(
"body argument is deprecated for http web exceptions",
DeprecationWarning,
)
Response.__init__(
self,
status=self.status_code,
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
Exception.__init__(self, self.reason)
if self.body is None and not self.empty_body:
self.text = "{}: {}".format(self.status, self.reason)
self.text = f"{self.status}: {self.reason}"
def __bool__(self) -> bool:
return True
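These classes double as responses and are normally raised from handlers; a small usage sketch (route and message invented):

from aiohttp import web

async def get_user(request: web.Request) -> web.Response:
    user_id = request.match_info.get("id", "")
    if user_id != "42":
        # Raised, not returned: returning HTTPException objects is the
        # deprecated pattern flagged elsewhere in this changeset (#2415).
        raise web.HTTPNotFound(text=f"no user {user_id!r}")
    return web.json_response({"id": user_id})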
@@ -150,20 +161,26 @@ class HTTPPartialContent(HTTPSuccessful):
class _HTTPMove(HTTPRedirection):
def __init__(
self,
location: StrOrURL,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if not location:
raise ValueError("HTTP redirects need a location to redirect to.")
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Location"] = str(URL(location))
self.location = location
@@ -236,19 +253,26 @@ class HTTPNotFound(HTTPClientError):
class HTTPMethodNotAllowed(HTTPClientError):
status_code = 405
def __init__(
self,
method: str,
allowed_methods: Iterable[str],
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
allow = ",".join(sorted(allowed_methods))
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Allow"] = allow
self.allowed_methods = set(allowed_methods) # type: Set[str]
self.method = method.upper()
@@ -284,14 +308,11 @@ class HTTPPreconditionFailed(HTTPClientError):
class HTTPRequestEntityTooLarge(HTTPClientError):
status_code = 413
def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
kwargs.setdefault(
"text",
"Maximum request body size {} exceeded, "
"actual body size {}".format(max_size, actual_size),
)
super().__init__(**kwargs)
@@ -343,17 +364,24 @@ class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
class HTTPUnavailableForLegalReasons(HTTPClientError):
status_code = 451
def __init__(
self,
link: str,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Link"] = '<%s>; rel="blocked-by"' % link
self.link = link


@@ -2,7 +2,7 @@ import asyncio
import mimetypes
import os
import pathlib
import sys
from typing import ( # noqa
IO,
TYPE_CHECKING,
@@ -17,10 +17,6 @@ from typing import ( # noqa
from . import hdrs
from .abc import AbstractStreamWriter
from .typedefs import LooseHeaders
from .web_exceptions import (
HTTPNotModified,
@@ -30,10 +26,10 @@ from .web_exceptions import (
)
from .web_response import StreamResponse
__all__ = ("FileResponse",)
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
@@ -42,93 +38,17 @@ _T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
class FileResponse(StreamResponse):
"""A response object can be used to send files."""
def __init__(
self,
path: Union[str, pathlib.Path],
chunk_size: int = 256 * 1024,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
super().__init__(status=status, reason=reason, headers=headers)
if isinstance(path, str):
@@ -137,83 +57,55 @@ class FileResponse(StreamResponse):
self._path = path
self._chunk_size = chunk_size
async def _sendfile_fallback(
self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
# To keep memory usage low, fobj is transferred in chunks
# controlled by the constructor's chunk_size argument.
chunk_size = self._chunk_size
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, fobj.seek, offset)
chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
while chunk:
await writer.write(chunk)
count = count - chunk_size
if count <= 0:
break
chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
await writer.drain()
return writer
if hasattr(os, "sendfile") and not NOSENDFILE: # pragma: no cover
_sendfile = _sendfile_system
else: # pragma: no cover
_sendfile = _sendfile_fallback
async def _sendfile(
self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
writer = await super().prepare(request)
assert writer is not None
if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
return await self._sendfile_fallback(writer, fobj, offset, count)
loop = request._loop
transport = request.transport
assert transport is not None
try:
await loop.sendfile(transport, fobj, offset, count)
except NotImplementedError:
return await self._sendfile_fallback(writer, fobj, offset, count)
await super().write_eof()
return writer
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
filepath = self._path
gzip = False
if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
gzip_path = filepath.with_name(filepath.name + ".gz")
if gzip_path.is_file():
filepath = gzip_path
@@ -238,10 +130,10 @@ class FileResponse(StreamResponse):
if hdrs.CONTENT_TYPE not in self.headers:
ct, encoding = mimetypes.guess_type(str(filepath))
if not ct:
ct = "application/octet-stream"
should_set_ct = True
else:
encoding = "gzip" if gzip else None
should_set_ct = False
status = self._status
@@ -273,8 +165,7 @@ class FileResponse(StreamResponse):
#
# Will do the same below. Many servers ignore this and do not
# send a Content-Range header with HTTP 416
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
@@ -296,8 +187,9 @@ class FileResponse(StreamResponse):
# of the representation (i.e., the server replaces the
# value of last-byte-pos with a value that is one less than
# the current length of the selected representation).
count = (
min(end if end is not None else file_size, file_size) - start
)
if start >= file_size:
# HTTP 416 should be returned in this case.
@@ -309,8 +201,7 @@ class FileResponse(StreamResponse):
# suffix-byte-range-spec with a non-zero suffix-length,
# then the byte-range-set is satisfiable. Otherwise, the
# byte-range-set is unsatisfiable.
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
@@ -328,19 +219,25 @@ class FileResponse(StreamResponse):
self.last_modified = st.st_mtime # type: ignore
self.content_length = count
self.headers[hdrs.ACCEPT_RANGES] = "bytes"
real_start = cast(int, start)
if status == HTTPPartialContent.status_code:
self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
real_start, real_start + count - 1, file_size
)
if request.method == hdrs.METH_HEAD or self.status in [204, 304]:
return await super().prepare(request)
fobj = await loop.run_in_executor(None, filepath.open, "rb")
if start: # be aware that start could be None or int=0 here.
offset = start
else:
offset = 0
try:
return await self._sendfile(request, fobj, offset, count)
finally:
await loop.run_in_executor(None, fobj.close)
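A minimal sketch of serving a file through this class (path invented): on Python 3.7+ prepare() now delegates to loop.sendfile() inside _sendfile, falling back to the chunked executor-based copy when sendfile is unavailable, disabled via AIOHTTP_NOSENDFILE, or compression is active.

from aiohttp import web

async def download(request: web.Request) -> web.FileResponse:
    # FileResponse also handles Range requests, conditional headers,
    # and the .gz sibling lookup shown in prepare() above.
    return web.FileResponse("/var/data/report.pdf", chunk_size=256 * 1024)

app = web.Application()
app.add_routes([web.get("/report", download)])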


@@ -10,7 +10,7 @@ from .abc import AbstractAccessLogger
from .web_request import BaseRequest
from .web_response import StreamResponse
KeyMethod = namedtuple("KeyMethod", "key method")
class AccessLogger(AbstractAccessLogger):
@@ -39,27 +39,27 @@ class AccessLogger(AbstractAccessLogger):
%{FOO}e os.environ['FOO']
"""
LOG_FORMAT_MAP = {
"a": "remote_address",
"t": "request_start_time",
"P": "process_id",
"r": "first_request_line",
"s": "response_status",
"b": "response_size",
"T": "request_time",
"Tf": "request_time_frac",
"D": "request_time_micro",
"i": "request_header",
"o": "response_header",
}
LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
CLEANUP_RE = re.compile(r"(%[^s])")
_FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
"""Initialise the logger.
logger is a logger object to be used for logging.
@@ -101,119 +101,92 @@ class AccessLogger(AbstractAccessLogger):
methods = list()
for atom in self.FORMAT_RE.findall(log_format):
if atom[1] == "":
format_key1 = self.LOG_FORMAT_MAP[atom[0]]
m = getattr(AccessLogger, "_format_%s" % atom[0])
key_method = KeyMethod(format_key1, m)
else:
format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
m = getattr(AccessLogger, "_format_%s" % atom[2])
key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
methods.append(key_method)
log_format = self.FORMAT_RE.sub(r"%s", log_format)
log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
return log_format, methods
@staticmethod
def _format_i(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
if request is None:
return "(no headers)"
# suboptimal, make istr(key) once
return request.headers.get(key, "-")
@staticmethod
def _format_o(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
# suboptimal, make istr(key) once
return response.headers.get(key, "-")
@staticmethod
def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return "-"
ip = request.remote
return ip if ip is not None else "-"
@staticmethod
def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
now = datetime.datetime.utcnow()
start_time = now - datetime.timedelta(seconds=time)
return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
@staticmethod
def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "<%s>" % os.getpid()
@staticmethod
def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return "-"
return "{} {} HTTP/{}.{}".format(
request.method,
request.path_qs,
request.version.major,
request.version.minor,
)
@staticmethod
def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.status
@staticmethod
def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.body_length
@staticmethod
def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time))
@staticmethod
def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "%06f" % time
@staticmethod
def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time * 1000000))
def _format_line(
self, request: BaseRequest, response: StreamResponse, time: float
) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
return [(key, method(request, response, time)) for key, method in self._methods]
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
try:
fmt_info = self._format_line(request, response, time)
@@ -225,10 +198,10 @@ class AccessLogger(AbstractAccessLogger):
if key.__class__ is str:
extra[key] = value
else:
k1, k2 = key # type: ignore
dct = extra.get(k1, {}) # type: ignore
dct[k2] = value # type: ignore
extra[k1] = dct # type: ignore
self.logger.info(self._log_format % tuple(values), extra=extra)
except Exception:


@@ -7,18 +7,17 @@ from .web_response import StreamResponse
from .web_urldispatcher import SystemRoute
__all__ = (
"middleware",
"normalize_path_middleware",
)
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
_Func = TypeVar("_Func")
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
alt_request = request.clone(rel_url=path)
match_info = await request.app.router.resolve(alt_request)
@@ -40,9 +39,12 @@ _Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
def normalize_path_middleware(
*,
append_slash: bool = True,
remove_slash: bool = False,
merge_slashes: bool = True,
redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect
) -> _Middleware:
"""
Middleware factory which produces a middleware that normalizes
the path of a request. Normalizing means:
@@ -80,29 +82,28 @@ def normalize_path_middleware(
async def impl(request: Request, handler: _Handler) -> StreamResponse:
if isinstance(request.match_info.route, SystemRoute):
paths_to_check = []
if "?" in request.raw_path:
path, query = request.raw_path.split("?", 1)
query = "?" + query
else:
query = ""
path = request.raw_path
if merge_slashes:
paths_to_check.append(re.sub("//+", "/", path))
if append_slash and not request.path.endswith("/"):
paths_to_check.append(path + "/")
if remove_slash and request.path.endswith("/"):
paths_to_check.append(path[:-1])
if merge_slashes and append_slash:
paths_to_check.append(re.sub("//+", "/", path + "/"))
if merge_slashes and remove_slash:
merged_slashes = re.sub("//+", "/", path)
paths_to_check.append(merged_slashes[:-1])
for path in paths_to_check:
path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
resolves, request = await _check_request_resolves(request, path)
if resolves:
raise redirect_class(request.raw_path + query)
@@ -111,10 +112,10 @@ def normalize_path_middleware(
return impl
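Usage sketch for the factory above (application setup invented); note the new re.sub("^//+", "/", path) guard addressing GHSA-v6wp-4m6f-gcjg before candidate paths are resolved:

from aiohttp import web
from aiohttp.web_middlewares import normalize_path_middleware

app = web.Application(
    middlewares=[
        # Redirects e.g. /a//b -> /a/b and /a/b -> /a/b/ with a 308 by default.
        normalize_path_middleware(append_slash=True, merge_slashes=True),
    ]
)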
def _fix_request_current_app(app: "Application") -> _Middleware:
@middleware
async def impl(request: Request, handler: _Handler) -> StreamResponse:
with request.match_info.set_current_app(app):
return await handler(request)
return impl


@@ -7,15 +7,7 @@ from contextlib import suppress
from html import escape as html_escape
from http import HTTPStatus
from logging import Logger
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Optional, Tuple, Type, cast
import yarl
@@ -37,25 +29,29 @@ from .web_log import AccessLogger
from .web_request import BaseRequest
from .web_response import Response, StreamResponse
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
if TYPE_CHECKING: # pragma: no cover
from .web_server import Server
_RequestFactory = Callable[
[
RawRequestMessage,
StreamReader,
"RequestHandler",
AbstractStreamWriter,
"asyncio.Task[None]",
],
BaseRequest,
]
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
ERROR = RawRequestMessage(
"UNKNOWN", "/", HttpVersion10, {}, {}, True, False, False, False, yarl.URL("/")
)
class RequestPayloadError(Exception):
@@ -105,38 +101,69 @@ class RequestHandler(BaseProtocol):
:param int max_headers: Optional maximum header size
"""
KEEPALIVE_RESCHEDULE_DELAY = 1
__slots__ = (
"_request_count",
"_keepalive",
"_manager",
"_request_handler",
"_request_factory",
"_tcp_keepalive",
"_keepalive_time",
"_keepalive_handle",
"_keepalive_timeout",
"_lingering_time",
"_messages",
"_message_tail",
"_waiter",
"_error_handler",
"_task_handler",
"_upgrade",
"_payload_parser",
"_request_parser",
"_reading_paused",
"logger",
"debug",
"access_log",
"access_logger",
"_close",
"_force_close",
"_current_request",
)
def __init__(
self,
manager: "Server",
*,
loop: asyncio.AbstractEventLoop,
keepalive_timeout: float = 75.0, # NGINX default is 75 secs
tcp_keepalive: bool = True,
logger: Logger = server_logger,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log: Logger = access_logger,
access_log_format: str = AccessLogger.LOG_FORMAT,
debug: bool = False,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
lingering_time: float = 10.0,
read_bufsize: int = 2 ** 16,
):
super().__init__(loop)
self._request_count = 0
self._keepalive = False
self._current_request = None # type: Optional[BaseRequest]
self._manager = manager # type: Optional[Server]
self._request_handler = (
manager.request_handler
) # type: Optional[_RequestHandler]
self._request_factory = (
manager.request_factory
) # type: Optional[_RequestFactory]
self._tcp_keepalive = tcp_keepalive
# placeholder to be replaced on keepalive timeout setup
@@ -146,7 +173,7 @@ class RequestHandler(BaseProtocol):
self._lingering_time = float(lingering_time)
self._messages = deque() # type: Any # Python 3.5 has no typing.Deque
self._message_tail = b""
self._waiter = None # type: Optional[asyncio.Future[None]]
self._error_handler = None # type: Optional[asyncio.Task[None]]
@@ -155,18 +182,22 @@ class RequestHandler(BaseProtocol):
self._upgrade = False
self._payload_parser = None # type: Any
self._request_parser = HttpRequestParser(
self,
loop,
read_bufsize,
max_line_size=max_line_size,
max_field_size=max_field_size,
max_headers=max_headers,
payload_exception=RequestPayloadError,
) # type: Optional[HttpRequestParser]
self.logger = logger
self.debug = debug
self.access_log = access_log
if access_log:
self.access_logger = access_log_class(
access_log, access_log_format
) # type: Optional[AbstractAccessLogger]
else:
self.access_logger = None
@@ -176,13 +207,14 @@
def __repr__(self) -> str:
return "<{} {}>".format(
self.__class__.__name__,
"connected" if self.transport is not None else "disconnected",
)
@property
def keepalive_timeout(self) -> float:
return self._keepalive_timeout
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
"""Worker process is about to exit, we need cleanup everything and
stop accepting requests. It is especially important for keep-alive
connections."""
@@ -197,12 +229,13 @@
# wait for handlers
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
with CeilTimeout(timeout, loop=self._loop):
if self._error_handler is not None and not self._error_handler.done():
await self._error_handler
if self._current_request is not None:
self._current_request._cancel(asyncio.CancelledError())
if self._task_handler is not None and not self._task_handler.done():
await self._task_handler
# force-close non-idle handler
@@ -240,11 +273,17 @@
if self._keepalive_handle is not None:
self._keepalive_handle.cancel()
if self._current_request is not None:
if exc is None:
exc = ConnectionResetError("Connection lost")
self._current_request._cancel(exc)
if self._error_handler is not None:
self._error_handler.cancel()
if self._task_handler is not None:
self._task_handler.cancel()
if self._waiter is not None:
self._waiter.cancel()
self._task_handler = None
@@ -260,7 +299,7 @@
if self._message_tail:
self._payload_parser.feed_data(self._message_tail)
self._message_tail = b""
def eof_received(self) -> None:
pass
@@ -277,15 +316,15 @@
# something happened during parsing
self._error_handler = self._loop.create_task(
self.handle_parse_error(
StreamWriter(self, self._loop), 400, exc, exc.message
)
)
self.close()
except Exception as exc:
# 500: internal error
self._error_handler = self._loop.create_task(
self.handle_parse_error(StreamWriter(self, self._loop), 500, exc)
)
self.close()
else:
if messages:
@@ -340,12 +379,11 @@
self.transport.close()
self.transport = None
def log_access(
self, request: BaseRequest, response: StreamResponse, time: float
) -> None:
if self.access_logger is not None:
self.access_logger.log(request, response, self._loop.time() - time)
def log_debug(self, *args: Any, **kw: Any) -> None:
if self.debug:
@@ -369,7 +407,39 @@
# not all request handlers are done,
# reschedule itself to next second
self._keepalive_handle = self._loop.call_later(
self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
)
async def _handle_request(
self,
request: BaseRequest,
start_time: float,
) -> Tuple[StreamResponse, bool]:
assert self._request_handler is not None
try:
try:
self._current_request = request
resp = await self._request_handler(request)
finally:
self._current_request = None
except HTTPException as exc:
resp = Response(
status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
)
reset = await self.finish_response(request, resp, start_time)
except asyncio.CancelledError:
raise
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
reset = await self.finish_response(request, resp, start_time)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
reset = await self.finish_response(request, resp, start_time)
else:
reset = await self.finish_response(request, resp, start_time)
return resp, reset
async def start(self) -> None:
"""Process incoming request.
@@ -403,87 +473,49 @@
message, payload = self._messages.popleft()
if self.access_log:
start = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
request = self._request_factory(message, payload, self, writer, handler)
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(self._handle_request(request, start))
try:
resp, reset = await task
except (asyncio.CancelledError, ConnectionError):
self.log_debug('Ignored premature client disconnection')
self.log_debug("Ignored premature client disconnection")
break
# Deprecation warning (See #2415)
if getattr(resp, "__http_exception__", False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning,
)
# Drop the processed task from asyncio.Task.all_tasks() early
del task
if reset:
self.log_debug("Ignored premature client disconnection 2")
break
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
with CeilTimeout(end_t - now, loop=loop):
# read and ignore
@@ -492,25 +524,24 @@
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
@ -519,8 +550,8 @@ class RequestHandler(BaseProtocol):
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
@@ -530,43 +561,79 @@
if self.transport is not None and self._error_handler is None:
self.transport.close()
async def finish_response(
self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> bool:
"""
Prepare the response and write_eof, then log access. This has to
be called within the context of any exception so the access logger
can get exception information. Returns True if the client disconnects
prematurely.
"""
if self._request_parser is not None:
self._request_parser.set_upgraded(False)
self._upgrade = False
if self._message_tail:
self._request_parser.feed_data(self._message_tail)
self._message_tail = b""
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return " "statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionError:
self.log_access(request, resp, start_time)
return True
else:
self.log_access(request, resp, start_time)
return False
def handle_error(
self,
request: BaseRequest,
status: int = 500,
exc: Optional[BaseException] = None,
message: Optional[str] = None,
) -> StreamResponse:
"""Handle errors.
Returns HTTP response with specific status code. Logs additional
information. It always closes current connection."""
self.log_exception("Error handling request", exc_info=exc)
ct = "text/plain"
if status == HTTPStatus.INTERNAL_SERVER_ERROR:
title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
tb = None
if self.debug:
with suppress(Exception):
tb = traceback.format_exc()
if "text/html" in request.headers.get("Accept", ""):
if tb:
tb = html_escape(tb)
msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
message = (
"<html><head>"
"<title>{title}</title>"
"</head><body>\n<h1>{title}</h1>"
"\n{msg}\n</body></html>\n"
).format(title=title, msg=msg)
ct = "text/html"
else:
if tb:
msg = tb
message = title + "\n\n" + msg
resp = Response(status=status, text=message, content_type=ct)
resp.force_close()
@ -577,17 +644,18 @@ class RequestHandler(BaseProtocol):
return resp
async def handle_parse_error(
self,
writer: AbstractStreamWriter,
status: int,
exc: Optional[BaseException] = None,
message: Optional[str] = None,
) -> None:
task = current_task()
assert task is not None
request = BaseRequest(
ERROR, EMPTY_PAYLOAD, self, writer, task, self._loop # type: ignore
)
resp = self.handle_error(request, status, exc, message)
await resp.prepare(request)
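
The RequestHandler above is driven by aiohttp's low-level server machinery rather than used directly. As a hedged orientation sketch (the handler body, host, and port are illustrative, not from this commit), a minimal low-level server wires it up through web.Server and the runner/site classes that appear later in this commit:

import asyncio
from aiohttp import web

async def handler(request: web.BaseRequest) -> web.Response:
    # every request parsed by RequestHandler ends up in a coroutine like this
    return web.Response(text="OK")

async def main() -> None:
    server = web.Server(handler)           # wraps RequestHandler per connection
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()
    await asyncio.sleep(3600)              # keep serving in this sketch

asyncio.run(main())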

View File

@ -10,7 +10,7 @@ import warnings
from email.utils import parsedate
from http.cookies import SimpleCookie
from types import MappingProxyType
from typing import (
TYPE_CHECKING,
Any,
Dict,
@ -32,6 +32,7 @@ from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
@ -44,45 +45,46 @@ from .typedefs import (
from .web_exceptions import HTTPRequestEntityTooLarge
from .web_response import StreamResponse
__all__ = ("BaseRequest", "FileField", "Request")
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
from .web_protocol import RequestHandler
from .web_urldispatcher import UrlMappingMatchInfo
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
name: str
filename: str
file: io.BufferedReader
content_type: str
headers: "CIMultiDictProxy[str]"
_TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class
_TOKEN = fr"[{_TCHAR}]+"
_QDTEXT = r"[{}]".format(
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)
_QUOTED_PAIR = r"\\[\t !-~]"
_QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format(
qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)
_FORWARDED_PAIR = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
token=_TOKEN, quoted_string=_QUOTED_STRING
)
_QUOTED_PAIR_REPLACE_RE = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group
_FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR)
@ -94,25 +96,51 @@ _FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR)
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
POST_METHODS = {
hdrs.METH_PATCH,
hdrs.METH_POST,
hdrs.METH_PUT,
hdrs.METH_TRACE,
hdrs.METH_DELETE,
}
ATTRS = HeadersMixin.ATTRS | frozenset(
[
"_message",
"_protocol",
"_payload_writer",
"_payload",
"_headers",
"_method",
"_version",
"_rel_url",
"_post",
"_read_bytes",
"_state",
"_cache",
"_task",
"_client_max_size",
"_loop",
"_transport_sslcontext",
"_transport_peername",
]
)
def __init__(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: "RequestHandler",
payload_writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
client_max_size: int = 1024 ** 2,
state: Optional[Dict[str, Any]] = None,
scheme: Optional[str] = None,
host: Optional[str] = None,
remote: Optional[str] = None,
) -> None:
if state is None:
state = {}
self._message = message
@ -124,7 +152,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._method = message.method
self._version = message.version
self._rel_url = message.url
self._post = (
None
) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
self._read_bytes = None # type: Optional[bytes]
self._state = state
@ -135,20 +165,26 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
transport = self._protocol.transport
assert transport is not None
self._transport_sslcontext = transport.get_extra_info("sslcontext")
self._transport_peername = transport.get_extra_info("peername")
if scheme is not None:
self._cache["scheme"] = scheme
if host is not None:
self._cache["host"] = host
if remote is not None:
self._cache["remote"] = remote
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "BaseRequest":
"""Clone itself with replacement some attributes.
Creates and returns a new instance of Request object. If no parameters
@ -158,31 +194,31 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
"""
if self._read_bytes:
raise RuntimeError("Cannot clone request "
"after reading its content")
raise RuntimeError("Cannot clone request " "after reading its content")
dct = {} # type: Dict[str, Any]
if method is not sentinel:
dct["method"] = method
if rel_url is not sentinel:
new_url = URL(rel_url)
dct["url"] = new_url
dct["path"] = str(new_url)
if headers is not sentinel:
# a copy semantic
dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
dct["raw_headers"] = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
message = self._message._replace(**dct)
kwargs = {}
if scheme is not sentinel:
kwargs["scheme"] = scheme
if host is not sentinel:
kwargs["host"] = host
if remote is not sentinel:
kwargs["remote"] = remote
return self.__class__(
message,
@ -193,14 +229,15 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._loop,
client_max_size=self._client_max_size,
state=self._state.copy(),
**kwargs,
)
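
A short usage sketch for clone(): only the keyword arguments passed in are replaced, the rest of the request is reused. The scheme and host values below are illustrative, assuming request is a BaseRequest in scope:

# e.g. inside a middleware behind a TLS-terminating proxy
patched = request.clone(scheme="https", host="example.com")
assert patched.scheme == "https"
assert patched.host == "example.com"  # the original request is untouched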
@property
def task(self) -> "asyncio.Task[None]":
return self._task
@property
def protocol(self) -> "RequestHandler":
return self._protocol
@property
@ -215,9 +252,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def message(self) -> RawRequestMessage:
warnings.warn("Request.message is deprecated",
DeprecationWarning,
stacklevel=3)
warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
return self._message
@reify
@ -226,9 +261,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn("request.loop property is deprecated",
DeprecationWarning,
stacklevel=2)
warnings.warn(
"request.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
# MutableMapping API
@ -253,7 +288,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def secure(self) -> bool:
"""A bool indicating if the request is handled with SSL."""
return self.scheme == "https"
@reify
def forwarded(self) -> Tuple[Mapping[str, str], ...]:
@ -284,37 +319,36 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)
if match is not None:  # got a valid forwarded-pair
if need_separator:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
else:
name, value, port = match.groups()
if value[0] == '"':
# quoted string: remove quotes and unescape
value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
if port:
value += port
elem[name.lower()] = value
pos += len(match.group(0))
need_separator = True
elif field_value[pos] == ",":  # next forwarded-element
need_separator = False
elem = {}
elems.append(types.MappingProxyType(elem))
pos += 1
elif field_value[pos] == ";":  # next forwarded-pair
need_separator = False
pos += 1
elif field_value[pos] in " \t":
# Allow whitespace even between forwarded-pairs, though
# RFC 7239 doesn't. This simplifies code and is in line
# with Postel's law.
pos += 1
else:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
return tuple(elems)
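
To see the parser above at work, a small sketch using aiohttp's test helper; the Forwarded header value is made up:

from aiohttp.test_utils import make_mocked_request

req = make_mocked_request(
    "GET", "/", headers={"Forwarded": "for=192.0.2.60;proto=http, for=198.51.100.17"}
)
print(req.forwarded)
# expected, roughly:
# ({'for': '192.0.2.60', 'proto': 'http'}, {'for': '198.51.100.17'})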
@reify
@ -329,9 +363,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
'http' or 'https'.
"""
if self._transport_sslcontext:
return "https"
else:
return "http"
@reify
def method(self) -> str:
@ -342,7 +376,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._method
@reify
def version(self) -> HttpVersion:
"""Read only property for getting HTTP version of request.
Returns aiohttp.protocol.HttpVersion instance.
@ -402,7 +436,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
@reify
def raw_path(self) -> str:
""" The URL including raw *PATH INFO* without the host or scheme.
"""The URL including raw *PATH INFO* without the host or scheme.
Warning, the path is unquoted and may contains non valid URL characters
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
@ -410,7 +444,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._message.path
@reify
def query(self) -> "MultiDictProxy[str]":
"""A multidict with all the variables in the query string."""
return self._rel_url.query
@ -423,7 +457,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._rel_url.query_string
@reify
def headers(self) -> "CIMultiDictProxy[str]":
"""A case-insensitive multidict proxy with all headers."""
return self._headers
@ -433,14 +467,12 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._message.raw_headers
@staticmethod
def _http_date(_date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if _date_str is not None:
timetuple = parsedate(_date_str)
if timetuple is not None:
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
@reify
@ -478,10 +510,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
A read-only dictionary-like object.
"""
raw = self.headers.get(hdrs.COOKIE, "")
parsed = SimpleCookie(raw)  # type: SimpleCookie[str]
return MappingProxyType({key: val.value for key, val in parsed.items()})
@reify
def http_range(self) -> slice:
@ -494,7 +525,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
start, end = None, None
if rng is not None:
try:
pattern = r"^bytes=(\d*)-(\d*)$"
start, end = re.findall(pattern, rng)[0]
except IndexError: # pattern was not found in header
raise ValueError("range not in acceptable format")
@ -512,10 +543,10 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
end += 1
if start >= end:
raise ValueError("start cannot be after end")
if start is end is None: # No valid range supplied
raise ValueError("No start or end of range specified")
return slice(start, end, 1)
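
A sketch of how the resulting slice is meant to be applied; resource_bytes is a hypothetical in-memory buffer:

from aiohttp.test_utils import make_mocked_request

req = make_mocked_request("GET", "/data", headers={"Range": "bytes=0-499"})
rng = req.http_range
assert rng == slice(0, 500, 1)  # the slice end is exclusive, hence 499 + 1
resource_bytes = b"x" * 1000
chunk = resource_bytes[rng]     # the first 500 bytes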
@ -528,8 +559,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
def has_body(self) -> bool:
"""Return True if request's HTTP BODY can be read, False otherwise."""
warnings.warn(
"Deprecated, use .can_read_body #2005",
DeprecationWarning, stacklevel=2)
"Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
)
return not self._payload.at_eof()
@property
@ -564,8 +595,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
body_size = len(body)
if body_size >= self._client_max_size:
raise HTTPRequestEntityTooLarge(
max_size=self._client_max_size, actual_size=body_size
)
if not chunk:
break
@ -575,10 +605,10 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
return bytes_body.decode(encoding)
async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
"""Return BODY as JSON."""
body = await self.text()
return loads(body)
@ -587,7 +617,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
"""Return async iterator to process BODY as multipart."""
return MultipartReader(self._headers, self._payload)
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
@ -596,15 +626,17 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._post
content_type = self.content_type
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
@ -614,66 +646,91 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
field_ct = field.headers.get(hdrs.CONTENT_TYPE)
if isinstance(field, BodyPartReader):
assert field.name is not None
# Note that according to RFC 7578, the Content-Type header
# is optional, even for files, so we can't assume it's
# present.
# https://tools.ietf.org/html/rfc7578#section-4.4
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2 ** 16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2 ** 16)
tmp.seek(0)
if field_ct is None:
field_ct = "application/octet-stream"
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
field_ct,
field.headers,
)
out.add(field.name, ff)
else:
# deal with ordinary data
value = await field.read(decode=True)
if field_ct is None or field_ct.startswith("text/"):
charset = field.get_charset(default="utf-8")
out.add(field.name, value.decode(charset))
else:
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
else:
raise ValueError(
"To decode nested multipart you need " "to use custom reader",
)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or "utf-8"
out.extend(
parse_qsl(
data.rstrip().decode(charset),
keep_blank_values=True,
encoding=charset,
)
)
self._post = MultiDictProxy(out)
return self._post
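
A handler-side sketch of the post() flow above; the form field name "upload" is an assumption:

from aiohttp import web

async def handle_form(request: web.Request) -> web.Response:
    data = await request.post()
    upload = data["upload"]  # file inputs arrive as FileField
    if isinstance(upload, web.FileField):
        content = upload.file.read()  # spooled to a temp file by post()
        return web.Response(text=f"{upload.filename}: {len(content)} bytes")
    return web.Response(text=str(upload))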
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""Extra info from protocol transport"""
protocol = self._protocol
if protocol is None:
return default
transport = protocol.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
def __repr__(self) -> str:
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
"ascii"
)
return "<{} {} {} >".format(
self.__class__.__name__, self._method, ascii_encodable_path
)
def __eq__(self, other: object) -> bool:
return id(self) == id(other)
@ -684,10 +741,13 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
async def _prepare_hook(self, response: StreamResponse) -> None:
return
def _cancel(self, exc: BaseException) -> None:
self._payload.set_exception(exc)
class Request(BaseRequest):
ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
@ -699,38 +759,48 @@ class Request(BaseRequest):
self._match_info = None # type: Optional[UrlMappingMatchInfo]
if DEBUG:
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn("Setting custom {}.{} attribute "
"is discouraged".format(self.__class__.__name__,
name),
DeprecationWarning,
stacklevel=2)
warnings.warn(
"Setting custom {}.{} attribute "
"is discouraged".format(self.__class__.__name__, name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "Request":
ret = super().clone(
method=method,
rel_url=rel_url,
headers=headers,
scheme=scheme,
host=host,
remote=remote,
)
new_ret = cast(Request, ret)
new_ret._match_info = self._match_info
return new_ret
@reify
def match_info(self) -> "UrlMappingMatchInfo":
"""Result of route resolving."""
match_info = self._match_info
assert match_info is not None
return match_info
@property
def app(self) -> "Application":
"""Application instance."""
match_info = self._match_info
assert match_info is not None
@ -743,7 +813,7 @@ class Request(BaseRequest):
lst = match_info.apps
app = self.app
idx = lst.index(app)
sublist = list(reversed(lst[: idx + 1]))
return ChainMapProxy(sublist)
async def _prepare_hook(self, response: StreamResponse) -> None:

View File

@ -1,5 +1,5 @@
import asyncio
import collections.abc
import datetime
import enum
import json
@ -9,8 +9,8 @@ import warnings
import zlib
from concurrent.futures import Executor
from email.utils import parsedate
from http.cookies import Morsel, SimpleCookie
from typing import (
TYPE_CHECKING,
Any,
Dict,
@ -27,29 +27,36 @@ from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
from .helpers import PY_38, HeadersMixin, rfc822_formatted_time, sentinel
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest
BaseClass = MutableMapping[str, Any]
else:
BaseClass = collections.abc.MutableMapping
if not PY_38:
# allow samesite to be used in python < 3.8
# already permitted in python 3.8, see https://bugs.python.org/issue29613
Morsel._reserved["samesite"] = "SameSite" # type: ignore
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
# Additional registered codings are listed at:
# https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
deflate = "deflate"
gzip = "gzip"
identity = "identity"
############################################################
@ -61,16 +68,19 @@ class StreamResponse(BaseClass, HeadersMixin):
_length_check = True
def __init__(
self,
*,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
self._body = None
self._keep_alive = None # type: Optional[bool]
self._chunked = False
self._compression = False
self._compression_force = None # type: Optional[ContentCoding]
self._cookies = SimpleCookie()  # type: SimpleCookie[str]
self._req = None # type: Optional[BaseRequest]
self._payload_writer = None # type: Optional[AbstractStreamWriter]
@ -90,8 +100,8 @@ class StreamResponse(BaseClass, HeadersMixin):
return self._payload_writer is not None
@property
def task(self) -> "asyncio.Task[None]":
return getattr(self._req, "task", None)
@property
def status(self) -> int:
@ -109,19 +119,21 @@ class StreamResponse(BaseClass, HeadersMixin):
def reason(self) -> str:
return self._reason
def set_status(
self,
status: int,
reason: Optional[str] = None,
_RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
) -> None:
assert not self.prepared, (
"Cannot change the response status code after " "the headers have been sent"
)
self._status = int(status)
if reason is None:
try:
reason = _RESPONSES[self._status][0]
except Exception:
reason = ""
self._reason = reason
@property
@ -137,53 +149,61 @@ class StreamResponse(BaseClass, HeadersMixin):
@property
def output_length(self) -> int:
warnings.warn("output_length is deprecated", DeprecationWarning)
assert self._payload_writer
return self._payload_writer.buffer_size
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
"""Enables automatic chunked transfer encoding."""
self._chunked = True
if hdrs.CONTENT_LENGTH in self._headers:
raise RuntimeError("You can't enable chunked encoding when "
"a content length is set")
raise RuntimeError(
"You can't enable chunked encoding when " "a content length is set"
)
if chunk_size is not None:
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
def enable_compression(
self, force: Optional[Union[bool, ContentCoding]] = None
) -> None:
"""Enables response compression encoding."""
# Backwards compatibility for when force was a bool <0.17.
if type(force) == bool:
force = ContentCoding.deflate if force else ContentCoding.identity
warnings.warn("Using boolean for force is deprecated #3318",
DeprecationWarning)
warnings.warn(
"Using boolean for force is deprecated #3318", DeprecationWarning
)
elif force is not None:
assert isinstance(force, ContentCoding), (
"force should be one of " "None, bool or " "ContentEncoding"
)
self._compression = True
self._compression_force = force
@property
def headers(self) -> "CIMultiDict[str]":
return self._headers
@property
def cookies(self) -> "SimpleCookie[str]":
return self._cookies
def set_cookie(
self,
name: str,
value: str,
*,
expires: Optional[str] = None,
domain: Optional[str] = None,
max_age: Optional[Union[int, str]] = None,
path: str = "/",
secure: Optional[bool] = None,
httponly: Optional[bool] = None,
version: Optional[str] = None,
samesite: Optional[str] = None,
) -> None:
"""Set or update response cookie.
Sets a new cookie or updates an existing one with the new value.
@ -191,7 +211,7 @@ class StreamResponse(BaseClass, HeadersMixin):
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == "":
# deleted cookie
self._cookies.pop(name, None)
@ -199,39 +219,46 @@ class StreamResponse(BaseClass, HeadersMixin):
c = self._cookies[name]
if expires is not None:
c["expires"] = expires
elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
del c["expires"]
if domain is not None:
c["domain"] = domain
if max_age is not None:
c["max-age"] = str(max_age)
elif "max-age" in c:
del c["max-age"]
c["path"] = path
if secure is not None:
c["secure"] = secure
if httponly is not None:
c["httponly"] = httponly
if version is not None:
c["version"] = version
if samesite is not None:
c["samesite"] = samesite
def del_cookie(
self, name: str, *, domain: Optional[str] = None, path: str = "/"
) -> None:
"""Delete cookie.
Creates new empty expired cookie.
"""
# TODO: do we need domain/path here?
self._cookies.pop(name, None)
self.set_cookie(
name,
"",
max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain,
path=path,
)
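
Putting the two cookie methods together, a brief usage sketch (cookie names and values are illustrative):

from aiohttp import web

resp = web.Response(text="ok")
resp.set_cookie("session", "abc123", max_age=3600, httponly=True, samesite="Lax")
resp.del_cookie("legacy")  # emits an empty cookie that is already expired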
@property
def content_length(self) -> Optional[int]:
@ -243,8 +270,9 @@ class StreamResponse(BaseClass, HeadersMixin):
if value is not None:
value = int(value)
if self._chunked:
raise RuntimeError("You can't set content length when "
"chunked encoding is enable")
raise RuntimeError(
"You can't set content length when " "chunked encoding is enable"
)
self._headers[hdrs.CONTENT_LENGTH] = str(value)
else:
self._headers.pop(hdrs.CONTENT_LENGTH, None)
@ -268,14 +296,16 @@ class StreamResponse(BaseClass, HeadersMixin):
@charset.setter
def charset(self, value: Optional[str]) -> None:
ctype = self.content_type # read header values if needed
if ctype == "application/octet-stream":
raise RuntimeError(
"Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first"
)
assert self._content_dict is not None
if value is None:
self._content_dict.pop("charset", None)
else:
self._content_dict["charset"] = str(value).lower()
self._generate_content_type_header()
@property
@ -288,34 +318,34 @@ class StreamResponse(BaseClass, HeadersMixin):
if httpdate is not None:
timetuple = parsedate(httpdate)
if timetuple is not None:
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
@last_modified.setter
def last_modified(
self, value: Optional[Union[int, float, datetime.datetime, str]]
) -> None:
if value is None:
self._headers.pop(hdrs.LAST_MODIFIED, None)
elif isinstance(value, (int, float)):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)))
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
)
elif isinstance(value, datetime.datetime):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple())
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
)
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value
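
The setter above accepts several value types; a small sketch of each form (the concrete values are arbitrary):

import datetime
import time
from aiohttp import web

resp = web.StreamResponse()
resp.last_modified = time.time()  # int/float: formatted as an HTTP-date
resp.last_modified = datetime.datetime.now(datetime.timezone.utc)  # datetime
resp.last_modified = "Sat, 27 Mar 2021 16:21:31 GMT"  # str: stored verbatim
resp.last_modified = None  # removes the header again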
def _generate_content_type_header(
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
) -> None:
assert self._content_dict is not None
assert self._content_type is not None
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
if params:
ctype = self._content_type + "; " + params
else:
ctype = self._content_type
self._headers[CONTENT_TYPE] = ctype
@ -329,43 +359,49 @@ class StreamResponse(BaseClass, HeadersMixin):
# remove the header
self._headers.popall(hdrs.CONTENT_LENGTH, None)
async def _start_compression(self, request: "BaseRequest") -> None:
if self._compression_force:
await self._do_start_compression(self._compression_force)
else:
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
for coding in ContentCoding:
if coding.value in accept_encoding:
await self._do_start_compression(coding)
return
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
if self._eof_sent:
return None
if self._payload_writer is not None:
return self._payload_writer
await request._prepare_hook(self)
return await self._start(request)
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
self._req = request
writer = self._payload_writer = request._payload_writer
await self._prepare_headers()
await request._prepare_hook(self)
await self._write_headers()
return writer
async def _prepare_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
keep_alive = self._keep_alive
if keep_alive is None:
keep_alive = request.keep_alive
self._keep_alive = keep_alive
version = request.version
headers = self._headers
for cookie in self._cookies.values():
value = cookie.output(header="")[1:]
headers.add(hdrs.SET_COOKIE, value)
if self._compression:
@ -375,9 +411,10 @@ class StreamResponse(BaseClass, HeadersMixin):
if version != HttpVersion11:
raise RuntimeError(
"Using chunked encoding is forbidden "
"for HTTP/{0.major}.{0.minor}".format(request.version))
"for HTTP/{0.major}.{0.minor}".format(request.version)
)
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
elif self._length_check:
@ -385,13 +422,17 @@ class StreamResponse(BaseClass, HeadersMixin):
if writer.length is None:
if version >= HttpVersion11:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
else:
keep_alive = False
# HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
# HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
del headers[hdrs.CONTENT_LENGTH]
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
@ -399,21 +440,27 @@ class StreamResponse(BaseClass, HeadersMixin):
if hdrs.CONNECTION not in headers:
if keep_alive:
if version == HttpVersion10:
headers[hdrs.CONNECTION] = "keep-alive"
else:
if version == HttpVersion11:
headers[hdrs.CONNECTION] = "close"
async def _write_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
# status line
version = request.version
status_line = "HTTP/{}.{} {} {}".format(
version[0], version[1], self._status, self._reason
)
await writer.write_headers(status_line, self._headers)
async def write(self, data: bytes) -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
raise RuntimeError("Cannot call write() after write_eof()")
@ -424,22 +471,23 @@ class StreamResponse(BaseClass, HeadersMixin):
async def drain(self) -> None:
assert not self._eof_sent, "EOF has already been sent"
assert self._payload_writer is not None, "Response has not been started"
warnings.warn(
"drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2,
)
await self._payload_writer.drain()
async def write_eof(self, data: bytes = b"") -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
return
assert self._payload_writer is not None, "Response has not been started"
await self._payload_writer.write_eof(data)
self._eof_sent = True
@ -452,11 +500,10 @@ class StreamResponse(BaseClass, HeadersMixin):
info = "eof"
elif self.prepared:
assert self._req is not None
info = "{} {} ".format(self._req.method, self._req.path)
info = f"{self._req.method} {self._req.path} "
else:
info = "not prepared"
return "<{} {} {}>".format(self.__class__.__name__,
self.reason, info)
return f"<{self.__class__.__name__} {self.reason} {info}>"
def __getitem__(self, key: str) -> Any:
return self._state[key]
@ -481,17 +528,19 @@ class StreamResponse(BaseClass, HeadersMixin):
class Response(StreamResponse):
def __init__(
self,
*,
body: Any = None,
status: int = 200,
reason: Optional[str] = None,
text: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: Optional[str] = None,
charset: Optional[str] = None,
zlib_executor_size: Optional[int] = None,
zlib_executor: Optional[Executor] = None,
) -> None:
if body is not None and text is not None:
raise ValueError("body and text are not allowed together")
@ -503,38 +552,39 @@ class Response(StreamResponse):
real_headers = headers # = cast('CIMultiDict[str]', headers)
if content_type is not None and "charset" in content_type:
raise ValueError("charset must not be in content_type "
"argument")
raise ValueError("charset must not be in content_type " "argument")
if text is not None:
if hdrs.CONTENT_TYPE in real_headers:
if content_type or charset:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
# fast path for filling headers
if not isinstance(text, str):
raise TypeError("text argument must be str (%r)" %
type(text))
raise TypeError("text argument must be str (%r)" % type(text))
if content_type is None:
content_type = "text/plain"
if charset is None:
charset = "utf-8"
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
body = text.encode(charset)
text = None
else:
if hdrs.CONTENT_TYPE in real_headers:
if content_type is not None or charset is not None:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
if content_type is not None:
if charset is not None:
content_type += "; charset=" + charset
real_headers[hdrs.CONTENT_TYPE] = content_type
super().__init__(status=status, reason=reason, headers=real_headers)
@ -553,9 +603,12 @@ class Response(StreamResponse):
return self._body
@body.setter
def body(
self,
body: bytes,
CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
) -> None:
if body is None:
self._body = None # type: Optional[bytes]
self._body_payload = False # type: bool
@ -566,7 +619,7 @@ class Response(StreamResponse):
try:
self._body = body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
raise ValueError("Unsupported body type %r" % type(body))
self._body_payload = True
@ -594,17 +647,18 @@ class Response(StreamResponse):
def text(self) -> Optional[str]:
if self._body is None:
return None
return self._body.decode(self.charset or "utf-8")
@text.setter
def text(self, text: str) -> None:
assert text is None or isinstance(
text, str
), "text argument must be str (%r)" % type(text)
if self.content_type == "application/octet-stream":
self.content_type = "text/plain"
if self.charset is None:
self.charset = "utf-8"
self._body = text.encode(self.charset)
self._body_payload = False
@ -633,19 +687,18 @@ class Response(StreamResponse):
def content_length(self, value: Optional[int]) -> None:
raise RuntimeError("Content length is set automatically")
async def write_eof(self, data: bytes = b"") -> None:
if self._eof_sent:
return
if self._compressed_body is None:
body = self._body # type: Optional[Union[bytes, Payload]]
else:
body = self._compressed_body
assert not data, "data arg is not supported, got {!r}".format(data)
assert not data, f"data arg is not supported, got {data!r}"
assert self._req is not None
assert self._payload_writer is not None
if body is not None:
if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
await super().write_eof()
elif self._body_payload:
payload = cast(Payload, body)
@ -656,22 +709,22 @@ class Response(StreamResponse):
else:
await super().write_eof()
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
if not self._body_payload:
if self._body is not None:
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
else:
self._headers[hdrs.CONTENT_LENGTH] = "0"
return await super()._start(request)
def _compress_body(self, zlib_mode: int) -> None:
assert zlib_mode > 0
compressobj = zlib.compressobj(wbits=zlib_mode)
body_in = self._body
assert body_in is not None
self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._body_payload or self._chunked:
@ -680,14 +733,18 @@ class Response(StreamResponse):
if coding != ContentCoding.identity:
# Instead of using _payload_writer.enable_compression,
# compress the whole body
zlib_mode = (
16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
)
body_in = self._body
assert body_in is not None
if (
self._zlib_executor_size is not None
and len(body_in) > self._zlib_executor_size
):
await asyncio.get_event_loop().run_in_executor(
self._zlib_executor, self._compress_body, zlib_mode
)
else:
self._compress_body(zlib_mode)
@ -698,20 +755,27 @@ class Response(StreamResponse):
self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
def json_response(
data: Any = sentinel,
*,
text: Optional[str] = None,
body: Optional[bytes] = None,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
) -> Response:
if data is not sentinel:
if text or body:
raise ValueError("only one of data, text, or body should be specified")
else:
text = dumps(data)
return Response(
text=text,
body=body,
status=status,
reason=reason,
headers=headers,
content_type=content_type,
)
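
A minimal handler sketch for the helper above (the payload dict is made up):

from aiohttp import web

async def status(request: web.Request) -> web.Response:
    # serializes with json.dumps and sets Content-Type: application/json
    return web.json_response({"status": "ok", "version": 1})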

View File

@ -22,21 +22,34 @@ from .abc import AbstractView
from .typedefs import PathLike
if TYPE_CHECKING: # pragma: no cover
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import AbstractRoute, UrlDispatcher
else:
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
__all__ = (
"AbstractRouteDef",
"RouteDef",
"StaticDef",
"RouteTableDef",
"head",
"options",
"get",
"post",
"patch",
"put",
"delete",
"route",
"view",
"static",
)
class AbstractRouteDef(abc.ABC):
@abc.abstractmethod
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
pass # pragma: no cover
@ -44,50 +57,52 @@ _SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
_HandlerType = Union[Type[AbstractView], _SimpleHandler]
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
method: str
path: str
handler: _HandlerType
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(", {}={!r}".format(name, value))
return ("<RouteDef {method} {path} -> {handler.__name__!r}"
"{info}>".format(method=self.method, path=self.path,
handler=self.handler, info=''.join(info)))
info.append(f", {name}={value!r}")
return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
method=self.method, path=self.path, handler=self.handler, info="".join(info)
)
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
if self.method in hdrs.METH_ALL:
reg = getattr(router, "add_" + self.method.lower())
return [reg(self.path, self.handler, **self.kwargs)]
else:
return [
router.add_route(self.method, self.path, self.handler, **self.kwargs)
]
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
prefix: str
path: PathLike
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(", {}={!r}".format(name, value))
return ("<StaticDef {prefix} -> {path}"
"{info}>".format(prefix=self.prefix, path=self.path,
info=''.join(info)))
info.append(f", {name}={value!r}")
return "<StaticDef {prefix} -> {path}" "{info}>".format(
prefix=self.prefix, path=self.path, info="".join(info)
)
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
resource = router.add_static(self.prefix, self.path, **self.kwargs)
routes = resource.get_info().get("routes", {})
return list(routes.values())
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return RouteDef(method, path, handler, kwargs)
@ -99,10 +114,17 @@ def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
def get(
path: str,
handler: _HandlerType,
*,
name: Optional[str] = None,
allow_head: bool = True,
**kwargs: Any,
) -> RouteDef:
return route(
hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
)
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
@ -125,8 +147,7 @@ def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
return route(hdrs.METH_ANY, path, handler, **kwargs)
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
return StaticDef(prefix, path, kwargs)
@ -135,6 +156,7 @@ _Deco = Callable[[_HandlerType], _HandlerType]
class RouteTableDef(Sequence[AbstractRouteDef]):
"""Route definition table"""
def __init__(self) -> None:
self._items = [] # type: List[AbstractRouteDef]
@ -142,12 +164,14 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
return "<RouteTableDef count={}>".format(len(self._items))
@overload
def __getitem__(self, index: int) -> AbstractRouteDef:
...
@overload
def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
...
def __getitem__(self, index):  # type: ignore
return self._items[index]
def __iter__(self) -> Iterator[AbstractRouteDef]:
@ -159,13 +183,11 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def __contains__(self, item: object) -> bool:
return item in self._items
def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
def inner(handler: _HandlerType) -> _HandlerType:
self._items.append(RouteDef(method, path, handler, kwargs))
return handler
return inner
def head(self, path: str, **kwargs: Any) -> _Deco:
@ -189,6 +211,5 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def view(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_ANY, path, **kwargs)
def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
self._items.append(StaticDef(prefix, path, kwargs))
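
A usage sketch for the route table above; the paths and the static directory are assumptions:

from aiohttp import web

routes = web.RouteTableDef()

@routes.get("/")
async def index(request: web.Request) -> web.Response:
    return web.Response(text="hello")

routes.static("/static", "./static")  # assumed on-disk directory

app = web.Application()
app.add_routes(routes)  # register() now reports the created routes back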

View File

@ -15,8 +15,17 @@ except ImportError:
SSLContext = object # type: ignore
__all__ = (
"BaseSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
"SockSite",
"BaseRunner",
"AppRunner",
"ServerRunner",
"GracefulExit",
)
class GracefulExit(SystemExit):
@ -28,13 +37,16 @@ def _raise_graceful_exit() -> None:
class BaseSite(ABC):
__slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
def __init__(
self,
runner: "BaseRunner",
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
if runner.server is None:
raise RuntimeError("Call runner.setup() before making a site")
self._runner = runner
@ -59,7 +71,7 @@ class BaseSite(ABC):
return # not started yet
self._server.close()
# named pipes do not have wait_closed property
if hasattr(self._server, "wait_closed"):
await self._server.wait_closed()
await self._runner.shutdown()
assert self._runner.server
@ -68,18 +80,26 @@ class BaseSite(ABC):
class TCPSite(BaseSite):
__slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")
def __init__(
self,
runner: "BaseRunner",
host: Optional[str] = None,
port: Optional[int] = None,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._host = host
if port is None:
port = 8443 if self._ssl_context else 8080
@ -89,36 +109,50 @@ class TCPSite(BaseSite):
@property
def name(self) -> str:
scheme = "https" if self._ssl_context else "http"
host = "0.0.0.0" if self._host is None else self._host
return str(URL.build(scheme=scheme, host=host, port=self._port))
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server(
server,
self._host,
self._port,
ssl=self._ssl_context,
backlog=self._backlog,
reuse_address=self._reuse_address,
reuse_port=self._reuse_port,
)
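
An end-to-end sketch of the runner/site API reworked above; the port and the way the sketch keeps the loop alive are illustrative:

import asyncio
from aiohttp import web

async def main() -> None:
    app = web.Application()
    runner = web.AppRunner(app)
    await runner.setup()
    # host=None now defers binding to all interfaces; name still reports 0.0.0.0
    site = web.TCPSite(runner, host=None, port=8080)
    await site.start()
    print(runner.addresses)  # e.g. [('0.0.0.0', 8080)]
    try:
        await asyncio.Event().wait()  # serve until cancelled
    finally:
        await runner.cleanup()

asyncio.run(main())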
class UnixSite(BaseSite):
__slots__ = ("_path",)
def __init__(
self,
runner: "BaseRunner",
path: str,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._path = path
@property
def name(self) -> str:
scheme = "https" if self._ssl_context else "http"
return f"{scheme}://unix:{self._path}:"
async def start(self) -> None:
await super().start()
@ -126,19 +160,21 @@ class UnixSite(BaseSite):
server = self._runner.server
assert server is not None
self._server = await loop.create_unix_server(
server, self._path, ssl=self._ssl_context, backlog=self._backlog
)
class NamedPipeSite(BaseSite):
__slots__ = ("_path",)
def __init__(
self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
) -> None:
loop = asyncio.get_event_loop()
if not isinstance(loop, asyncio.ProactorEventLoop): # type: ignore
raise RuntimeError("Named Pipes only available in proactor"
"loop under windows")
raise RuntimeError(
"Named Pipes only available in proactor" "loop under windows"
)
super().__init__(runner, shutdown_timeout=shutdown_timeout)
self._path = path
@@ -151,25 +187,32 @@ class NamedPipeSite(BaseSite):
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
_server = await loop.start_serving_pipe( # type: ignore
server, self._path
)
_server = await loop.start_serving_pipe(server, self._path) # type: ignore
self._server = _server[0]
class SockSite(BaseSite):
__slots__ = ('_sock', '_name')
__slots__ = ("_sock", "_name")
def __init__(self, runner: 'BaseRunner', sock: socket.socket, *,
shutdown_timeout: float=60.0,
ssl_context: Optional[SSLContext]=None,
backlog: int=128) -> None:
super().__init__(runner, shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context, backlog=backlog)
def __init__(
self,
runner: "BaseRunner",
sock: socket.socket,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._sock = sock
scheme = 'https' if self._ssl_context else 'http'
if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX:
name = '{}://unix:{}:'.format(scheme, sock.getsockname())
scheme = "https" if self._ssl_context else "http"
if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
name = f"{scheme}://unix:{sock.getsockname()}:"
else:
host, port = sock.getsockname()[:2]
name = str(URL.build(scheme=scheme, host=host, port=port))
@@ -184,15 +227,15 @@ class SockSite(BaseSite):
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server( # type: ignore
server, sock=self._sock,
ssl=self._ssl_context, backlog=self._backlog)
self._server = await loop.create_server(
server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
)
class BaseRunner(ABC):
__slots__ = ('_handle_signals', '_kwargs', '_server', '_sites')
__slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")
def __init__(self, *, handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
self._handle_signals = handle_signals
self._kwargs = kwargs
self._server = None # type: Optional[Server]
@@ -203,8 +246,8 @@ class BaseRunner(ABC):
return self._server
@property
def addresses(self) -> List[str]:
ret = [] # type: List[str]
def addresses(self) -> List[Any]:
ret = [] # type: List[Any]
for site in self._sites:
server = site._server
if server is not None:
@@ -268,29 +311,27 @@ class BaseRunner(ABC):
def _reg_site(self, site: BaseSite) -> None:
if site in self._sites:
raise RuntimeError("Site {} is already registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is already registered in runner {self}")
self._sites.append(site)
def _check_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError("Site {} is not registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is not registered in runner {self}")
def _unreg_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError("Site {} is not registered in runner {}"
.format(site, self))
raise RuntimeError(f"Site {site} is not registered in runner {self}")
self._sites.remove(site)
class ServerRunner(BaseRunner):
"""Low-level web server runner"""
__slots__ = ('_web_server',)
__slots__ = ("_web_server",)
def __init__(self, web_server: Server, *,
handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(
self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
self._web_server = web_server
@@ -307,14 +348,17 @@ class ServerRunner(BaseRunner):
class AppRunner(BaseRunner):
"""Web Application runner"""
__slots__ = ('_app',)
__slots__ = ("_app",)
def __init__(self, app: Application, *,
handle_signals: bool=False, **kwargs: Any) -> None:
def __init__(
self, app: Application, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
if not isinstance(app, Application):
raise TypeError("The first argument should be web.Application "
"instance, got {!r}".format(app))
raise TypeError(
"The first argument should be web.Application "
"instance, got {!r}".format(app)
)
self._app = app
@property
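The runner and site changes above are almost entirely Black reformatting and typing cleanups; the AppRunner/TCPSite flow is unchanged, except that a host of None is now stored as None and only rendered as 0.0.0.0 in the name property. A minimal sketch of driving this API (handler, route, and port are illustrative, not from this commit):

import asyncio

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


async def main() -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    runner = web.AppRunner(app)            # AppRunner validates the Application type above
    await runner.setup()
    site = web.TCPSite(runner, port=8080)  # host=None -> listens on all interfaces
    await site.start()
    print("serving on", site.name)         # name substitutes 0.0.0.0 for a None host
    await asyncio.sleep(60)                # serve for a while, then clean up
    await runner.cleanup()


asyncio.run(main())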

View File

@@ -9,17 +9,18 @@ from .streams import StreamReader
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
from .web_request import BaseRequest
__all__ = ('Server',)
__all__ = ("Server",)
class Server:
def __init__(self,
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory]=None,
loop: Optional[asyncio.AbstractEventLoop]=None,
**kwargs: Any) -> None:
def __init__(
self,
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any
) -> None:
self._loop = get_running_loop(loop)
self._connections = {} # type: Dict[RequestHandler, asyncio.Transport]
self._kwargs = kwargs
@@ -31,26 +32,30 @@ class Server:
def connections(self) -> List[RequestHandler]:
return list(self._connections.keys())
def connection_made(self, handler: RequestHandler,
transport: asyncio.Transport) -> None:
def connection_made(
self, handler: RequestHandler, transport: asyncio.Transport
) -> None:
self._connections[handler] = transport
def connection_lost(self, handler: RequestHandler,
exc: Optional[BaseException]=None) -> None:
def connection_lost(
self, handler: RequestHandler, exc: Optional[BaseException] = None
) -> None:
if handler in self._connections:
del self._connections[handler]
def _make_request(self, message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: 'asyncio.Task[None]') -> BaseRequest:
return BaseRequest(
message, payload, protocol, writer, task, self._loop)
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
) -> BaseRequest:
return BaseRequest(message, payload, protocol, writer, task, self._loop)
async def shutdown(self, timeout: Optional[float]=None) -> None:
async def shutdown(self, timeout: Optional[float] = None) -> None:
coros = [conn.shutdown(timeout) for conn in self._connections]
await asyncio.gather(*coros, loop=self._loop)
await asyncio.gather(*coros)
self._connections.clear()
def __call__(self) -> RequestHandler:
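The one behavioral change in Server is dropping the loop argument from asyncio.gather(), which was deprecated in Python 3.8 and removed in 3.10; the rest is reformatting. For context, this low-level Server is typically used as in the following sketch (handler and port are illustrative):

import asyncio

from aiohttp import web


async def handler(request: web.BaseRequest) -> web.Response:
    return web.Response(text="low-level OK")


async def main() -> None:
    server = web.Server(handler)        # the class patched above
    runner = web.ServerRunner(server)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()
    await asyncio.sleep(60)
    await runner.cleanup()              # ends up calling Server.shutdown()


asyncio.run(main())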

File diff suppressed because it is too large

View File

@@ -20,9 +20,10 @@ from .http import (
WebSocketReader,
WebSocketWriter,
WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen,
ws_ext_parse,
)
from .http import WSMsgType as WSMsgType
from .http import ws_ext_gen, ws_ext_parse
from .log import ws_logger
from .streams import EofStream, FlowControlDataQueue
from .typedefs import JSONDecoder, JSONEncoder
@@ -30,15 +31,19 @@ from .web_exceptions import HTTPBadRequest, HTTPException
from .web_request import BaseRequest
from .web_response import StreamResponse
__all__ = ('WebSocketResponse', 'WebSocketReady', 'WSMsgType',)
__all__ = (
"WebSocketResponse",
"WebSocketReady",
"WSMsgType",
)
THRESHOLD_CONNLOST_ACCESS = 5
@attr.s(frozen=True, slots=True)
@attr.s(auto_attribs=True, frozen=True, slots=True)
class WebSocketReady:
ok = attr.ib(type=bool)
protocol = attr.ib(type=Optional[str])
ok: bool
protocol: Optional[str]
def __bool__(self) -> bool:
return self.ok
@@ -48,12 +53,18 @@ class WebSocketResponse(StreamResponse):
_length_check = False
def __init__(self, *,
timeout: float=10.0, receive_timeout: Optional[float]=None,
autoclose: bool=True, autoping: bool=True,
heartbeat: Optional[float]=None,
protocols: Iterable[str]=(),
compress: bool=True, max_msg_size: int=4*1024*1024) -> None:
def __init__(
self,
*,
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
protocols: Iterable[str] = (),
compress: bool = True,
max_msg_size: int = 4 * 1024 * 1024,
) -> None:
super().__init__(status=101)
self._protocols = protocols
self._ws_protocol = None # type: Optional[str]
@@ -92,7 +103,8 @@ class WebSocketResponse(StreamResponse):
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop)
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
@@ -104,7 +116,8 @@ class WebSocketResponse(StreamResponse):
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop)
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if self._req is not None and self._req.transport is not None:
@@ -125,27 +138,32 @@ class WebSocketResponse(StreamResponse):
await payload_writer.drain()
return payload_writer
def _handshake(self, request: BaseRequest) -> Tuple['CIMultiDict[str]',
str,
bool,
bool]:
def _handshake(
self, request: BaseRequest
) -> Tuple["CIMultiDict[str]", str, bool, bool]:
headers = request.headers
if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip():
if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
raise HTTPBadRequest(
text=('No WebSocket UPGRADE hdr: {}\n Can '
'"Upgrade" only to "WebSocket".')
.format(headers.get(hdrs.UPGRADE)))
text=(
"No WebSocket UPGRADE hdr: {}\n Can "
'"Upgrade" only to "WebSocket".'
).format(headers.get(hdrs.UPGRADE))
)
if 'upgrade' not in headers.get(hdrs.CONNECTION, '').lower():
if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
raise HTTPBadRequest(
text='No CONNECTION upgrade hdr: {}'.format(
headers.get(hdrs.CONNECTION)))
text="No CONNECTION upgrade hdr: {}".format(
headers.get(hdrs.CONNECTION)
)
)
# find common sub-protocol between client and server
protocol = None
if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
req_protocols = [str(proto.strip()) for proto in
headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')]
req_protocols = [
str(proto.strip())
for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
for proto in req_protocols:
if proto in self._protocols:
@@ -154,31 +172,34 @@ class WebSocketResponse(StreamResponse):
else:
# No overlap found: Return no protocol as per spec
ws_logger.warning(
'Client protocols %r don't overlap server-known ones %r',
req_protocols, self._protocols)
"Client protocols %r dont overlap server-known ones %r",
req_protocols,
self._protocols,
)
# check supported version
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, '')
if version not in ('13', '8', '7'):
raise HTTPBadRequest(
text='Unsupported version: {}'.format(version))
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
if version not in ("13", "8", "7"):
raise HTTPBadRequest(text=f"Unsupported version: {version}")
# check client handshake for validity
key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
try:
if not key or len(base64.b64decode(key)) != 16:
raise HTTPBadRequest(
text='Handshake error: {!r}'.format(key))
raise HTTPBadRequest(text=f"Handshake error: {key!r}")
except binascii.Error:
raise HTTPBadRequest(
text='Handshake error: {!r}'.format(key)) from None
raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
accept_val = base64.b64encode(
hashlib.sha1(key.encode() + WS_KEY).digest()).decode()
hashlib.sha1(key.encode() + WS_KEY).digest()
).decode()
response_headers = CIMultiDict( # type: ignore
{hdrs.UPGRADE: 'websocket',
hdrs.CONNECTION: 'upgrade',
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val})
{
hdrs.UPGRADE: "websocket", # type: ignore
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
}
)
notakeover = False
compress = 0
@@ -188,24 +209,19 @@ class WebSocketResponse(StreamResponse):
# If something happened, just drop compress extension
compress, notakeover = ws_ext_parse(extensions, isserver=True)
if compress:
enabledext = ws_ext_gen(compress=compress, isserver=True,
server_notakeover=notakeover)
enabledext = ws_ext_gen(
compress=compress, isserver=True, server_notakeover=notakeover
)
response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
if protocol:
response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
return (response_headers, # type: ignore
protocol,
compress,
notakeover)
return (response_headers, protocol, compress, notakeover) # type: ignore
def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
self._loop = request._loop
headers, protocol, compress, notakeover = self._handshake(
request)
self._reset_heartbeat()
headers, protocol, compress, notakeover = self._handshake(request)
self.set_status(101)
self.headers.update(headers)
@@ -213,29 +229,32 @@ class WebSocketResponse(StreamResponse):
self._compress = compress
transport = request._protocol.transport
assert transport is not None
writer = WebSocketWriter(request._protocol,
transport,
compress=compress,
notakeover=notakeover)
writer = WebSocketWriter(
request._protocol, transport, compress=compress, notakeover=notakeover
)
return protocol, writer
def _post_start(self, request: BaseRequest,
protocol: str, writer: WebSocketWriter) -> None:
def _post_start(
self, request: BaseRequest, protocol: str, writer: WebSocketWriter
) -> None:
self._ws_protocol = protocol
self._writer = writer
self._reset_heartbeat()
loop = self._loop
assert loop is not None
self._reader = FlowControlDataQueue(
request._protocol, limit=2 ** 16, loop=loop)
request.protocol.set_parser(WebSocketReader(
self._reader, self._max_msg_size, compress=self._compress))
self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
request.protocol.set_parser(
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
)
# disable HTTP keepalive for WebSocket
request.protocol.keep_alive(False)
def can_prepare(self, request: BaseRequest) -> WebSocketReady:
if self._writer is not None:
raise RuntimeError('Already started')
raise RuntimeError("Already started")
try:
_, protocol, _, _ = self._handshake(request)
except HTTPException:
@@ -262,35 +281,38 @@ class WebSocketResponse(StreamResponse):
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes=b'') -> None:
async def ping(self, message: bytes = b"") -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
await self._writer.ping(message)
async def pong(self, message: bytes=b'') -> None:
async def pong(self, message: bytes = b"") -> None:
# unsolicited pong
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
await self._writer.pong(message)
async def send_str(self, data: str, compress: Optional[bool]=None) -> None:
async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
if not isinstance(data, str):
raise TypeError('data argument must be str (%r)' % type(data))
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes,
compress: Optional[bool]=None) -> None:
async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be byte-ish (%r)' %
type(data))
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(self, data: Any, compress: Optional[bool]=None, *,
dumps: JSONEncoder=json.dumps) -> None:
async def send_json(
self,
data: Any,
compress: Optional[bool] = None,
*,
dumps: JSONEncoder = json.dumps,
) -> None:
await self.send_str(dumps(data), compress=compress)
async def write_eof(self) -> None: # type: ignore
@@ -302,9 +324,9 @@ class WebSocketResponse(StreamResponse):
await self.close()
self._eof_sent = True
async def close(self, *, code: int=1000, message: bytes=b'') -> bool:
async def close(self, *, code: int = 1000, message: bytes = b"") -> bool:
if self._writer is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
self._cancel_heartbeat()
reader = self._reader
@@ -357,21 +379,20 @@ class WebSocketResponse(StreamResponse):
else:
return False
async def receive(self, timeout: Optional[float]=None) -> WSMessage:
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
if self._reader is None:
raise RuntimeError('Call .prepare() first')
raise RuntimeError("Call .prepare() first")
loop = self._loop
assert loop is not None
while True:
if self._waiting is not None:
raise RuntimeError(
'Concurrent call to receive() is not allowed')
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
self._conn_lost += 1
if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
raise RuntimeError('WebSocket connection is closed.')
raise RuntimeError("WebSocket connection is closed.")
return WS_CLOSED_MESSAGE
elif self._closing:
return WS_CLOSING_MESSAGE
@@ -380,7 +401,8 @@ class WebSocketResponse(StreamResponse):
self._waiting = loop.create_future()
try:
with async_timeout.timeout(
timeout or self._receive_timeout, loop=self._loop):
timeout or self._receive_timeout, loop=self._loop
):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
@@ -420,37 +442,40 @@ class WebSocketResponse(StreamResponse):
return msg
async def receive_str(self, *, timeout: Optional[float]=None) -> str:
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not WSMsgType.TEXT".format(
msg.type, msg.data))
msg.type, msg.data
)
)
return msg.data
async def receive_bytes(self, *, timeout: Optional[float]=None) -> bytes:
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(
"Received message {}:{!r} is not bytes".format(msg.type,
msg.data))
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return msg.data
async def receive_json(self, *, loads: JSONDecoder=json.loads,
timeout: Optional[float]=None) -> Any:
async def receive_json(
self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
async def write(self, data: bytes) -> None:
raise RuntimeError("Cannot call .write() for websocket")
def __aiter__(self) -> 'WebSocketResponse':
def __aiter__(self) -> "WebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE,
WSMsgType.CLOSING,
WSMsgType.CLOSED):
raise StopAsyncIteration # NOQA
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
def _cancel(self, exc: BaseException) -> None:
if self._reader is not None:
self._reader.set_exception(exc)
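Beyond formatting, the notable changes here are @attr.s(auto_attribs=True, ...) on WebSocketReady (auto_attribs needs attrs 17.3 or newer) and moving _reset_heartbeat() from _pre_start to _post_start, so the heartbeat timer only starts once the writer is in place. A minimal sketch of a handler built on this class (route and payload are illustrative):

from aiohttp import web


async def ws_handler(request: web.Request) -> web.WebSocketResponse:
    ws = web.WebSocketResponse(heartbeat=30.0)  # keyword-only parameters, as above
    await ws.prepare(request)                   # runs the _handshake() shown above
    async for msg in ws:                        # __anext__ stops on CLOSE/CLOSING/CLOSED
        if msg.type == web.WSMsgType.TEXT:
            await ws.send_str(msg.data.upper())
        elif msg.type == web.WSMsgType.ERROR:
            break
    return ws


app = web.Application()
app.router.add_get("/ws", ws_handler)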

View File

@@ -19,15 +19,14 @@ from .web_log import AccessLogger
try:
import ssl
SSLContext = ssl.SSLContext # noqa
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore
SSLContext = object # type: ignore
__all__ = ('GunicornWebWorker',
'GunicornUVLoopWebWorker',
'GunicornTokioWebWorker')
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
class GunicornWebWorker(base.Worker):
@@ -70,16 +69,18 @@ class GunicornWebWorker(base.Worker):
elif asyncio.iscoroutinefunction(self.wsgi):
app = await self.wsgi()
else:
raise RuntimeError("wsgi app should be either Application or "
"async function returning Application, got {}"
.format(self.wsgi))
raise RuntimeError(
"wsgi app should be either Application or "
"async function returning Application, got {}".format(self.wsgi)
)
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format))
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)
await runner.setup()
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
@@ -90,8 +91,11 @@ class GunicornWebWorker(base.Worker):
assert server is not None
for sock in self.sockets:
site = web.SockSite(
runner, sock, ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95)
runner,
sock,
ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await site.start()
# If our parent changed then we shut down.
@@ -115,7 +119,7 @@ class GunicornWebWorker(base.Worker):
await runner.cleanup()
def _wait_next_notify(self) -> 'asyncio.Future[bool]':
def _wait_next_notify(self) -> "asyncio.Future[bool]":
self._notify_waiter_done()
loop = self.loop
@@ -125,7 +129,9 @@ class GunicornWebWorker(base.Worker):
return waiter
def _notify_waiter_done(self, waiter: 'asyncio.Future[bool]'=None) -> None:
def _notify_waiter_done(
self, waiter: Optional["asyncio.Future[bool]"] = None
) -> None:
if waiter is None:
waiter = self._notify_waiter
if waiter is not None:
@@ -137,23 +143,29 @@ class GunicornWebWorker(base.Worker):
def init_signals(self) -> None:
# Set up signals through the event loop API.
self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit,
signal.SIGQUIT, None)
self.loop.add_signal_handler(
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
)
self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit,
signal.SIGTERM, None)
self.loop.add_signal_handler(
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
)
self.loop.add_signal_handler(signal.SIGINT, self.handle_quit,
signal.SIGINT, None)
self.loop.add_signal_handler(
signal.SIGINT, self.handle_quit, signal.SIGINT, None
)
self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch,
signal.SIGWINCH, None)
self.loop.add_signal_handler(
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
)
self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1,
signal.SIGUSR1, None)
self.loop.add_signal_handler(
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
)
self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort,
signal.SIGABRT, None)
self.loop.add_signal_handler(
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
)
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
@@ -176,13 +188,13 @@ class GunicornWebWorker(base.Worker):
sys.exit(1)
@staticmethod
def _create_ssl_context(cfg: Any) -> 'SSLContext':
""" Creates SSLContext instance for usage in asyncio.create_server.
def _create_ssl_context(cfg: Any) -> "SSLContext":
"""Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
if ssl is None: # pragma: no cover
raise RuntimeError('SSL is not supported.')
raise RuntimeError("SSL is not supported.")
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
@@ -196,7 +208,7 @@ class GunicornWebWorker(base.Worker):
def _get_valid_log_format(self, source_format: str) -> str:
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
return self.DEFAULT_AIOHTTP_LOG_FORMAT
elif re.search(r'%\([^\)]+\)', source_format):
elif re.search(r"%\([^\)]+\)", source_format):
raise ValueError(
"Gunicorn's style options in form of `%(name)s` are not "
"supported for the log formatting. Please use aiohttp's "
@@ -209,7 +221,6 @@ class GunicornWebWorker(base.Worker):
class GunicornUVLoopWebWorker(GunicornWebWorker):
def init_process(self) -> None:
import uvloop
@@ -226,7 +237,6 @@ class GunicornUVLoopWebWorker(GunicornWebWorker):
class GunicornTokioWebWorker(GunicornWebWorker):
def init_process(self) -> None: # pragma: no cover
import tokio
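The worker changes are formatting plus an Optional annotation on _notify_waiter_done. For reference, these workers are selected on the gunicorn command line; a sketch of an app module they can serve (module name, route, and command are illustrative):

# app.py -- run with, e.g.:
#   gunicorn app:create_app --bind localhost:8080 --worker-class aiohttp.GunicornWebWorker
from aiohttp import web


async def handle(request: web.Request) -> web.Response:
    return web.Response(text="behind gunicorn")


async def create_app() -> web.Application:
    # _run() above accepts either an Application or an async factory like this one
    app = web.Application()
    app.router.add_get("/", handle)
    return app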

View File

@@ -1,5 +1,6 @@
import logging
import sys
import traceback
from datetime import datetime, timedelta
from traceback import format_tb
@@ -33,6 +34,7 @@ async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
exception=exc, traceback=formatted_tb))
logger.exception('Job "%s" raised an exception', job)
traceback.clear_frames(tb)
else:
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
retval=retval))
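The functional change here is traceback.clear_frames(tb) after logging a failed job: it clears each frame's local variables so a retained traceback object does not pin them (and everything they reference) in memory for the life of the scheduler. The idea in isolation:

import sys
import traceback


def log_and_release() -> None:
    try:
        1 / 0
    except ZeroDivisionError:
        tb = sys.exc_info()[2]
        formatted = "".join(traceback.format_tb(tb))  # keep the text for the event
        traceback.clear_frames(tb)  # drop frame locals so they can be collected
        print(formatted)


log_and_release()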

View File

@@ -3,6 +3,11 @@ import concurrent.futures
from apscheduler.executors.base import BaseExecutor, run_job
try:
from concurrent.futures.process import BrokenProcessPool
except ImportError:
BrokenProcessPool = None
class BasePoolExecutor(BaseExecutor):
@abstractmethod
@@ -19,7 +24,13 @@ class BasePoolExecutor(BaseExecutor):
else:
self._run_job_success(job.id, f.result())
f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
try:
f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
except BrokenProcessPool:
self._logger.warning('Process pool is broken; replacing pool with a fresh instance')
self._pool = self._pool.__class__(self._pool._max_workers)
f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name)
f.add_done_callback(callback)
def shutdown(self, wait=True):
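This guards against concurrent.futures.process.BrokenProcessPool, which is raised when a worker process dies unexpectedly (OOM kill, os._exit, segfault): the executor replaces the broken pool with a fresh instance and resubmits once. The try/except ImportError keeps the module importable on interpreters whose concurrent.futures lacks that exception. The failure mode in isolation (a sketch, not scheduler code):

import os
from concurrent.futures import ProcessPoolExecutor
from concurrent.futures.process import BrokenProcessPool


def die() -> None:
    os._exit(1)  # kill the worker process outright, breaking the pool


if __name__ == "__main__":
    pool = ProcessPoolExecutor(max_workers=1)
    try:
        pool.submit(die).result()
    except BrokenProcessPool:
        pool = ProcessPoolExecutor(max_workers=1)  # replace it, as the patch does
    print(pool.submit(pow, 2, 10).result())  # 1024 on the fresh pool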

View File

@@ -28,7 +28,7 @@ class Job(object):
:var trigger: the trigger object that controls the schedule of this job
:var str executor: the name of the executor that will run this job
:var int misfire_grace_time: the time (in seconds) by which this job's execution is allowed to
be late
be late (``None`` means "allow the job to run no matter how late it is")
:var int max_instances: the maximum number of concurrently executing instances allowed for this
job
:var datetime.datetime next_run_time: the next scheduled run time of this job
@@ -40,7 +40,7 @@ class Job(object):
__slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref',
'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances',
'next_run_time')
'next_run_time', '__weakref__')
def __init__(self, scheduler, id=None, **kwargs):
super(Job, self).__init__()
@@ -242,8 +242,9 @@ class Job(object):
# Instance methods cannot survive serialization as-is, so store the "self" argument
# explicitly
if ismethod(self.func) and not isclass(self.func.__self__):
args = (self.func.__self__,) + tuple(self.args)
func = self.func
if ismethod(func) and not isclass(func.__self__) and obj_to_ref(func) == self.func_ref:
args = (func.__self__,) + tuple(self.args)
else:
args = self.args
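Two fixes here: __weakref__ is added to __slots__ so Job instances can be weakly referenced (a slotted class forbids weak references unless this slot exists), and __getstate__ now prepends self only when obj_to_ref(func) still matches func_ref, avoiding a duplicated self argument on unpickling for jobs referenced another way. The __weakref__ rule in isolation:

import weakref


class Slotted:
    __slots__ = ("x",)  # no __weakref__ slot


class SlottedWeakrefable:
    __slots__ = ("x", "__weakref__")  # the same fix as Job above


try:
    weakref.ref(Slotted())
except TypeError as exc:
    print("rejected:", exc)  # cannot create weak reference to 'Slotted' object

obj = SlottedWeakrefable()
print(weakref.ref(obj)() is obj)  # True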

View File

@@ -54,7 +54,7 @@ class MongoDBJobStore(BaseJobStore):
def start(self, scheduler, alias):
super(MongoDBJobStore, self).start(scheduler, alias)
self.collection.ensure_index('next_run_time', sparse=True)
self.collection.create_index('next_run_time', sparse=True)
@property
def connection(self):
@@ -83,7 +83,7 @@ class MongoDBJobStore(BaseJobStore):
def add_job(self, job):
try:
self.collection.insert({
self.collection.insert_one({
'_id': job.id,
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
@@ -96,13 +96,13 @@ class MongoDBJobStore(BaseJobStore):
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
}
result = self.collection.update({'_id': job.id}, {'$set': changes})
if result and result['n'] == 0:
result = self.collection.update_one({'_id': job.id}, {'$set': changes})
if result and result.matched_count == 0:
raise JobLookupError(job.id)
def remove_job(self, job_id):
result = self.collection.remove(job_id)
if result and result['n'] == 0:
result = self.collection.delete_one({'_id': job_id})
if result and result.deleted_count == 0:
raise JobLookupError(job_id)
def remove_all_jobs(self):

View File

@@ -11,7 +11,7 @@ except ImportError: # pragma: nocover
try:
from sqlalchemy import (
create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select)
create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select, and_)
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql.expression import null
except ImportError: # pragma: nocover
@@ -134,7 +134,7 @@ class SQLAlchemyJobStore(BaseJobStore):
jobs = []
selectable = select([self.jobs_t.c.id, self.jobs_t.c.job_state]).\
order_by(self.jobs_t.c.next_run_time)
selectable = selectable.where(*conditions) if conditions else selectable
selectable = selectable.where(and_(*conditions)) if conditions else selectable
failed_job_ids = set()
for row in self.engine.execute(selectable):
try:
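Older SQLAlchemy Select.where() accepts a single criterion (passing several positionally only became valid in 1.4), so the list of conditions is combined with and_() first. In isolation (the table definition below is illustrative):

from sqlalchemy import (
    Column, Float, LargeBinary, MetaData, Table, Unicode, and_, select)

metadata = MetaData()
jobs_t = Table(
    "apscheduler_jobs",
    metadata,
    Column("id", Unicode(191), primary_key=True),
    Column("next_run_time", Float(25), index=True),
    Column("job_state", LargeBinary, nullable=False),
)

conditions = [jobs_t.c.next_run_time.isnot(None), jobs_t.c.next_run_time <= 100.0]
stmt = select([jobs_t.c.id, jobs_t.c.job_state]).order_by(jobs_t.c.next_run_time)
stmt = stmt.where(and_(*conditions))  # one combined clause; valid on SQLAlchemy < 1.4
print(stmt)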

View File

@@ -1,6 +1,5 @@
from __future__ import absolute_import
import os
from datetime import datetime
from pytz import utc
@@ -65,7 +64,7 @@ class ZooKeeperJobStore(BaseJobStore):
def lookup_job(self, job_id):
self._ensure_paths()
node_path = os.path.join(self.path, job_id)
node_path = self.path + "/" + str(job_id)
try:
content, _ = self.client.get(node_path)
doc = pickle.loads(content)
@@ -92,7 +91,7 @@ class ZooKeeperJobStore(BaseJobStore):
def add_job(self, job):
self._ensure_paths()
node_path = os.path.join(self.path, str(job.id))
node_path = self.path + "/" + str(job.id)
value = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': job.__getstate__()
@@ -105,7 +104,7 @@ class ZooKeeperJobStore(BaseJobStore):
def update_job(self, job):
self._ensure_paths()
node_path = os.path.join(self.path, str(job.id))
node_path = self.path + "/" + str(job.id)
changes = {
'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
'job_state': job.__getstate__()
@@ -118,7 +117,7 @@ class ZooKeeperJobStore(BaseJobStore):
def remove_job(self, job_id):
self._ensure_paths()
node_path = os.path.join(self.path, str(job_id))
node_path = self.path + "/" + str(job_id)
try:
self.client.delete(node_path)
except NoNodeError:
@@ -151,7 +150,7 @@ class ZooKeeperJobStore(BaseJobStore):
all_ids = self.client.get_children(self.path)
for node_name in all_ids:
try:
node_path = os.path.join(self.path, node_name)
node_path = self.path + "/" + node_name
content, _ = self.client.get(node_path)
doc = pickle.loads(content)
job_def = {
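Replacing os.path.join with explicit "/" concatenation fixes Windows clients: there, os.path.join inserts a backslash, which is never a valid separator in a ZooKeeper node path. The difference in isolation:

import ntpath  # os.path as it behaves on Windows
import posixpath

base, job_id = "/apscheduler/jobs", "job1"
print(ntpath.join(base, job_id))     # /apscheduler/jobs\job1  (broken ZooKeeper path)
print(posixpath.join(base, job_id))  # /apscheduler/jobs/job1
print(base + "/" + str(job_id))      # /apscheduler/jobs/job1  (what the patch does)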

View File

@@ -38,13 +38,19 @@ class AsyncIOScheduler(BaseScheduler):
_eventloop = None
_timeout = None
def start(self, paused=False):
if not self._eventloop:
self._eventloop = asyncio.get_event_loop()
super(AsyncIOScheduler, self).start(paused)
@run_in_event_loop
def shutdown(self, wait=True):
super(AsyncIOScheduler, self).shutdown(wait)
self._stop_timer()
def _configure(self, config):
self._eventloop = maybe_ref(config.pop('event_loop', None)) or asyncio.get_event_loop()
self._eventloop = maybe_ref(config.pop('event_loop', None))
super(AsyncIOScheduler, self)._configure(config)
def _start_timer(self, wait_seconds):
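Acquiring the event loop in start() instead of _configure() means the scheduler binds to whatever loop exists when it is actually started, not the one that happened to be current at configuration time, while an explicit event_loop option still takes precedence. Typical usage after this change (the job and interval are illustrative):

import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler


def tick() -> None:
    print("tick")


async def main() -> None:
    scheduler = AsyncIOScheduler()  # no event_loop option needed
    scheduler.add_job(tick, "interval", seconds=1)
    scheduler.start()               # picks up the loop here, per the patch
    await asyncio.sleep(3.5)
    scheduler.shutdown()


asyncio.run(main())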

Some files were not shown because too many files have changed in this diff