Polling system for chosing MC server
This commit is contained in:
parent
8247cf1c44
commit
80c3fdf5be
2
.idea/discord.xml
generated
2
.idea/discord.xml
generated
@ -1,7 +1,7 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
<project version="4">
|
<project version="4">
|
||||||
<component name="DiscordProjectSettings">
|
<component name="DiscordProjectSettings">
|
||||||
<option name="show" value="true" />
|
<option name="show" value="PROJECT_FILES" />
|
||||||
</component>
|
</component>
|
||||||
<component name="ProjectNotificationSettings">
|
<component name="ProjectNotificationSettings">
|
||||||
<option name="askShowProject" value="false" />
|
<option name="askShowProject" value="false" />
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
from discord.ext import commands
|
import asyncio
|
||||||
|
|
||||||
import discord
|
import discord
|
||||||
import pymysql
|
import pymysql
|
||||||
import asyncio
|
from discord.ext import commands
|
||||||
|
|
||||||
password = open("../../../../sqlPass.txt", 'r')
|
password = open("../../../../sqlPass.txt", 'r')
|
||||||
|
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
from discord.ext import commands
|
|
||||||
import discord
|
import discord
|
||||||
import pymysql
|
import pymysql
|
||||||
|
from discord.ext import commands
|
||||||
|
|
||||||
password = open("../../../../sqlPass.txt", 'r')
|
password = open("../../../../sqlPass.txt", 'r')
|
||||||
|
|
||||||
|
@ -1 +1 @@
|
|||||||
import discord
|
|
||||||
|
155
scr/Modules/TMC/McRoll.py
Normal file
155
scr/Modules/TMC/McRoll.py
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
import asyncio
|
||||||
|
import math
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
import discord
|
||||||
|
import pymysql
|
||||||
|
from discord.ext import commands
|
||||||
|
|
||||||
|
|
||||||
|
# password = open("../../../sqlPass.txt", 'r')
|
||||||
|
|
||||||
|
def get_con():
|
||||||
|
return pymysql.connect(host='192.168.1.52',
|
||||||
|
port=5618,
|
||||||
|
user='Quentin',
|
||||||
|
password='kaPl0wskii',
|
||||||
|
db='mc',
|
||||||
|
charset='utf8mb4',
|
||||||
|
cursorclass=pymysql.cursors.DictCursor)
|
||||||
|
|
||||||
|
|
||||||
|
class McRoll(commands.Cog):
|
||||||
|
def __init__(self, client):
|
||||||
|
self.client = client
|
||||||
|
self.currentPoll = None
|
||||||
|
self.embedInfo = """Each server has a multiplier based on how long it has been sense that server has been rolled.
|
||||||
|
When this poll is over all servers with 0 votes will be disregarded.
|
||||||
|
If there are 4 or more servers left the two with the lowest votes will be disregarded.
|
||||||
|
Poll is rerun until there are only 2 servers, these two are the winners. Ties reroll the poll.
|
||||||
|
You can only vote for one server per poll. """
|
||||||
|
|
||||||
|
|
||||||
|
async def start_poll(self, ctx, choices):
|
||||||
|
embed = discord.Embed(title="Poll", description=self.embedInfo, timestamp=datetime.now() + timedelta(hours=12))
|
||||||
|
for choice in choices.items():
|
||||||
|
choice = choice[1]
|
||||||
|
embed.add_field(
|
||||||
|
name=choice['serverName'] + ' ' + (str(self.client.get_emoji(int(choice['reaction'])))
|
||||||
|
if choice['getEmoji']
|
||||||
|
else bytes(choice['reaction'], "utf-8").decode("unicode_escape")),
|
||||||
|
value="Multiplier : " + str(round(2 * math.log10(choice['lastActivated'] + 1) + 1, 2)) + '\n' +
|
||||||
|
"Last Rolled : " + str(choice['lastActivated'] * 2) + " weeks ago.")
|
||||||
|
self.currentPoll = await ctx.channel.send(embed=embed)
|
||||||
|
await ctx.channel.send("@everyone")
|
||||||
|
for choice in choices.items():
|
||||||
|
choice = choice[1]
|
||||||
|
await self.currentPoll.add_reaction(self.client.get_emoji(int(choice['reaction']))
|
||||||
|
if choice['getEmoji']
|
||||||
|
else bytes(choice['reaction'], "utf-8").decode("unicode_escape"))
|
||||||
|
await asyncio.sleep(43200)
|
||||||
|
|
||||||
|
for reaction in self.currentPoll.reactions:
|
||||||
|
async for user in reaction.users():
|
||||||
|
serverIp = self.reaction_to_serverip(reaction)
|
||||||
|
if user.id != 533427166193385494:
|
||||||
|
choices[serverIp]['votes'] = choices[serverIp]['votes'] + 1
|
||||||
|
|
||||||
|
choices[serverIp]['score'] = choices[serverIp]['votes'] * round(
|
||||||
|
2 * math.log10(choices[serverIp]['lastActivated'] + 1) + 1, 2)
|
||||||
|
|
||||||
|
for choice in list(choices):
|
||||||
|
if choices[choice]['score'] <= 0:
|
||||||
|
choices.pop(choice)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if len(choices) >= 4:
|
||||||
|
choices = self.pop_lowest(choices)
|
||||||
|
choices = self.pop_lowest(choices)
|
||||||
|
elif len(choices) == 3:
|
||||||
|
choices = self.pop_lowest(choices)
|
||||||
|
|
||||||
|
return choices
|
||||||
|
|
||||||
|
|
||||||
|
def pop_lowest(self, choices):
|
||||||
|
lowest = {'score': 10}
|
||||||
|
pop = False
|
||||||
|
for choice in list(choices):
|
||||||
|
if choices[choice]['score'] < lowest['score']:
|
||||||
|
lowest = choices[choice]
|
||||||
|
pop = True
|
||||||
|
if pop:
|
||||||
|
choices.pop(lowest['serverIP'])
|
||||||
|
return choices
|
||||||
|
|
||||||
|
def reaction_to_serverip(self, reaction):
|
||||||
|
con = get_con()
|
||||||
|
escapedReaction = reaction.emoji.encode('unicode_escape').decode('utf-8')
|
||||||
|
with con.cursor() as cursor:
|
||||||
|
cursor.execute("SELECT serverIP FROM mc.server_list WHERE reaction=%s;", escapedReaction)
|
||||||
|
serverIp = cursor.fetchone()
|
||||||
|
con.close()
|
||||||
|
return serverIp['serverIP']
|
||||||
|
|
||||||
|
|
||||||
|
@commands.Cog.listener()
|
||||||
|
async def on_reaction_add(self, reaction, user):
|
||||||
|
reactedUsers = {}
|
||||||
|
if reaction.message.id == self.currentPoll.id and user.id != 533427166193385494:
|
||||||
|
for iReaction in reaction.message.reactions:
|
||||||
|
async for iUser in iReaction.users():
|
||||||
|
if iUser.id != 533427166193385494:
|
||||||
|
if iUser.id not in reactedUsers:
|
||||||
|
reactedUsers[iUser.id] = False
|
||||||
|
if reactedUsers[iUser.id]:
|
||||||
|
await reaction.remove(user)
|
||||||
|
self.currentPoll = reaction.message
|
||||||
|
await user.send(
|
||||||
|
"You can only vote for one option, please remove your previous vote to change it.")
|
||||||
|
return
|
||||||
|
self.currentPoll = reaction.message
|
||||||
|
reactedUsers[iUser.id] = True
|
||||||
|
|
||||||
|
|
||||||
|
@commands.command()
|
||||||
|
async def mc_roll(self, ctx):
|
||||||
|
if ctx.message.author.id != 305589587215122432:
|
||||||
|
return
|
||||||
|
|
||||||
|
con = get_con()
|
||||||
|
with con.cursor() as cursor:
|
||||||
|
cursor.execute("SELECT serverIP, serverName, lastActivated, reaction, getEmoji "
|
||||||
|
"FROM mc.server_list WHERE lastActivated is not null")
|
||||||
|
choices = cursor.fetchall()
|
||||||
|
con.close()
|
||||||
|
|
||||||
|
dictChoices = {}
|
||||||
|
for choice in choices:
|
||||||
|
choice['score'] = 0
|
||||||
|
choice['votes'] = 0
|
||||||
|
dictChoices[choice['serverIP']] = choice
|
||||||
|
|
||||||
|
dictChoices = await self.start_poll(ctx, dictChoices)
|
||||||
|
|
||||||
|
while len(dictChoices) > 2:
|
||||||
|
print(dictChoices)
|
||||||
|
dictChoices = await self.start_poll(ctx, dictChoices)
|
||||||
|
|
||||||
|
embed = discord.Embed(title="Winner", description="Congratulations")
|
||||||
|
for item in list(dictChoices):
|
||||||
|
winner = dictChoices[item]
|
||||||
|
embed.add_field(
|
||||||
|
name=winner['serverName'],
|
||||||
|
value="Multiplier : " + str(round(2 * math.log10(winner['lastActivated'] + 1) + 1, 2)) + '\n' +
|
||||||
|
"Last Rolled : " + str(winner['lastActivated'] * 2) + " weeks ago." + '\n' +
|
||||||
|
"Votes : " + str(winner['votes']) + '\n' +
|
||||||
|
"Calculated Votes: " + str(winner['votes'] * round(2 * math.log10(winner['lastActivated'] + 1) + 1, 2))
|
||||||
|
)
|
||||||
|
|
||||||
|
await ctx.channel.send(embed=embed)
|
||||||
|
await ctx.channel.send("@everyone")
|
||||||
|
|
||||||
|
|
||||||
|
def setup(client):
|
||||||
|
client.add_cog(McRoll(client))
|
@ -1,6 +1,7 @@
|
|||||||
from discord.ext import commands
|
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
from discord.ext import commands
|
||||||
|
|
||||||
content = [", was just chosen to be the one.",
|
content = [", was just chosen to be the one.",
|
||||||
", has joined!",
|
", has joined!",
|
||||||
", looks like someone took the slow train from Philly.",
|
", looks like someone took the slow train from Philly.",
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
from discord.ext import commands
|
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
from discord.ext import commands
|
||||||
|
|
||||||
content = [" just bit the dust.",
|
content = [" just bit the dust.",
|
||||||
" gave up on life.",
|
" gave up on life.",
|
||||||
" couldn't take the heat anymore.",
|
" couldn't take the heat anymore.",
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
from discord.ext import commands
|
from discord.ext import commands
|
||||||
import discord
|
|
||||||
|
|
||||||
|
|
||||||
class Spam(commands.Cog):
|
class Spam(commands.Cog):
|
||||||
|
@ -1,11 +1,13 @@
|
|||||||
|
import discord
|
||||||
from discord.ext.commands import Bot
|
from discord.ext.commands import Bot
|
||||||
|
|
||||||
token = open("../token.txt")
|
token = open("../token.txt")
|
||||||
|
|
||||||
cogs = ["Modules.TMC.autoReply.Join", "Modules.TMC.autoReply.Leave",
|
cogs = ["Modules.TMC.autoReply.Join", "Modules.TMC.autoReply.Leave",
|
||||||
"Modules.TMC.ChangeServer.Change"]
|
"Modules.TMC.McRoll"]
|
||||||
|
|
||||||
prefix = 'o!'
|
prefix = 'o!'
|
||||||
|
intents = discord.Intents().all()
|
||||||
|
|
||||||
"""
|
"""
|
||||||
tokens = open("tokens.txt", 'r')
|
tokens = open("tokens.txt", 'r')
|
||||||
@ -17,7 +19,7 @@ elif bot_running == 'D':
|
|||||||
token = tokens[1].rstrip()
|
token = tokens[1].rstrip()
|
||||||
tokens = ''
|
tokens = ''
|
||||||
"""
|
"""
|
||||||
client = Bot(command_prefix=prefix)
|
client = Bot(command_prefix=prefix, intents=intents)
|
||||||
client.remove_command("help")
|
client.remove_command("help")
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
Metadata-Version: 2.1
|
Metadata-Version: 2.1
|
||||||
Name: PyMySQL
|
Name: PyMySQL
|
||||||
Version: 0.9.3
|
Version: 0.10.1
|
||||||
Summary: Pure Python MySQL Driver
|
Summary: Pure Python MySQL Driver
|
||||||
Home-page: https://github.com/PyMySQL/PyMySQL/
|
Home-page: https://github.com/PyMySQL/PyMySQL/
|
||||||
Author: yutaka.matsubara
|
Author: yutaka.matsubara
|
||||||
Author-email: yutaka.matsubara@gmail.com
|
Author-email: yutaka.matsubara@gmail.com
|
||||||
Maintainer: INADA Naoki
|
Maintainer: Inada Naoki
|
||||||
Maintainer-email: songofacandy@gmail.com
|
Maintainer-email: songofacandy@gmail.com
|
||||||
License: "MIT"
|
License: "MIT"
|
||||||
Project-URL: Documentation, https://pymysql.readthedocs.io/
|
Project-URL: Documentation, https://pymysql.readthedocs.io/
|
||||||
@ -15,15 +15,18 @@ Classifier: Development Status :: 5 - Production/Stable
|
|||||||
Classifier: Programming Language :: Python :: 2
|
Classifier: Programming Language :: Python :: 2
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
Classifier: Programming Language :: Python :: 3
|
Classifier: Programming Language :: Python :: 3
|
||||||
Classifier: Programming Language :: Python :: 3.4
|
|
||||||
Classifier: Programming Language :: Python :: 3.5
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
Classifier: Programming Language :: Python :: 3.6
|
||||||
Classifier: Programming Language :: Python :: 3.7
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
Classifier: Intended Audience :: Developers
|
Classifier: Intended Audience :: Developers
|
||||||
Classifier: License :: OSI Approved :: MIT License
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
Classifier: Topic :: Database
|
Classifier: Topic :: Database
|
||||||
|
Provides-Extra: ed25519
|
||||||
|
Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
|
||||||
Provides-Extra: rsa
|
Provides-Extra: rsa
|
||||||
Requires-Dist: cryptography ; extra == 'rsa'
|
Requires-Dist: cryptography ; extra == 'rsa'
|
||||||
|
|
||||||
@ -67,7 +70,7 @@ Requirements
|
|||||||
|
|
||||||
* Python -- one of the following:
|
* Python -- one of the following:
|
||||||
|
|
||||||
- CPython_ : 2.7 and >= 3.4
|
- CPython_ : 2.7 and >= 3.5
|
||||||
- PyPy_ : Latest version
|
- PyPy_ : Latest version
|
||||||
|
|
||||||
* MySQL Server -- one of the following:
|
* MySQL Server -- one of the following:
|
||||||
@ -95,6 +98,11 @@ you need to install additional dependency::
|
|||||||
|
|
||||||
$ python3 -m pip install PyMySQL[rsa]
|
$ python3 -m pip install PyMySQL[rsa]
|
||||||
|
|
||||||
|
To use MariaDB's "ed25519" authentication method, you need to install
|
||||||
|
additional dependency::
|
||||||
|
|
||||||
|
$ python3 -m pip install PyMySQL[ed25519]
|
||||||
|
|
||||||
|
|
||||||
Documentation
|
Documentation
|
||||||
-------------
|
-------------
|
@ -1,11 +1,12 @@
|
|||||||
PyMySQL-0.9.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
PyMySQL-0.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
PyMySQL-0.9.3.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
|
PyMySQL-0.10.1.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
|
||||||
PyMySQL-0.9.3.dist-info/METADATA,sha256=8_R1N3H_AmpUu72ctuiQVI1Pk2SMlb9sy1uGlnxXB4U,5212
|
PyMySQL-0.10.1.dist-info/METADATA,sha256=SP0KPSfmgNJ2ujhGRrRRiWOodzv62BfYnbY1OXX3DTI,5481
|
||||||
PyMySQL-0.9.3.dist-info/RECORD,,
|
PyMySQL-0.10.1.dist-info/RECORD,,
|
||||||
PyMySQL-0.9.3.dist-info/WHEEL,sha256=_wJFdOYk7i3xxT8ElOkUJvOdOvfNGbR9g-bf6UQT6sU,110
|
PyMySQL-0.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
PyMySQL-0.9.3.dist-info/pbr.json,sha256=Lqvh8-9N7qS6SLUlEJ5GDLWioQcvR9n1WWjMEfJ5mv8,47
|
PyMySQL-0.10.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
|
||||||
PyMySQL-0.9.3.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
|
PyMySQL-0.10.1.dist-info/pbr.json,sha256=Lqvh8-9N7qS6SLUlEJ5GDLWioQcvR9n1WWjMEfJ5mv8,47
|
||||||
pymysql/__init__.py,sha256=ESllVZVoMVkJ0w9FoaMMirjFbWNc6wmHEVHzGKEBefc,4732
|
PyMySQL-0.10.1.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
|
||||||
|
pymysql/__init__.py,sha256=KDHcmnEoEDMmRPNO5JFcxb7lsypDmwGn5Td-f-X6xDY,4733
|
||||||
pymysql/__pycache__/__init__.cpython-36.pyc,,
|
pymysql/__pycache__/__init__.cpython-36.pyc,,
|
||||||
pymysql/__pycache__/_auth.cpython-36.pyc,,
|
pymysql/__pycache__/_auth.cpython-36.pyc,,
|
||||||
pymysql/__pycache__/_compat.cpython-36.pyc,,
|
pymysql/__pycache__/_compat.cpython-36.pyc,,
|
||||||
@ -19,15 +20,15 @@ pymysql/__pycache__/optionfile.cpython-36.pyc,,
|
|||||||
pymysql/__pycache__/protocol.cpython-36.pyc,,
|
pymysql/__pycache__/protocol.cpython-36.pyc,,
|
||||||
pymysql/__pycache__/times.cpython-36.pyc,,
|
pymysql/__pycache__/times.cpython-36.pyc,,
|
||||||
pymysql/__pycache__/util.cpython-36.pyc,,
|
pymysql/__pycache__/util.cpython-36.pyc,,
|
||||||
pymysql/_auth.py,sha256=X2AiuevuDaD2L4wJO5J7rymvJJZm6mND7WYmeIb7wEk,7720
|
pymysql/_auth.py,sha256=pEeHBpQ15h2wfj6k7np6LVHVz34whEXSs5KrqeYtDGw,9564
|
||||||
pymysql/_compat.py,sha256=DSxMV2ib-rhIuQIKiXX44yds_0bN2M_RddfYQiSdB6U,481
|
pymysql/_compat.py,sha256=DSxMV2ib-rhIuQIKiXX44yds_0bN2M_RddfYQiSdB6U,481
|
||||||
pymysql/_socketio.py,sha256=smsw4wudNM4CKl85uis8QHfjDhz2iXQRvl8QV4TmB1w,4049
|
pymysql/_socketio.py,sha256=smsw4wudNM4CKl85uis8QHfjDhz2iXQRvl8QV4TmB1w,4049
|
||||||
pymysql/charset.py,sha256=tNeEkuzFXM5zeuOYm_XSM8zdt5P_paV2SyUB9B2ibqI,10330
|
pymysql/charset.py,sha256=zaaRbEQrFiE0iCd3AB52WJY9VqVxQcp8sYcoPDlPdWI,10308
|
||||||
pymysql/connections.py,sha256=98DHxN-h3tupGBIReR98E7LSTR7-OIYh3tulXGlGdvc,49041
|
pymysql/connections.py,sha256=xR0gWxvQ6IxBcFhY9JPmYRCcvs6xSnRKUq-DZ6MpfNY,49010
|
||||||
pymysql/constants/CLIENT.py,sha256=cPMxnQQbBG6xqaEDwqzggTfWIuJQ1Oy7HrIgw_vgpo4,853
|
pymysql/constants/CLIENT.py,sha256=cPMxnQQbBG6xqaEDwqzggTfWIuJQ1Oy7HrIgw_vgpo4,853
|
||||||
pymysql/constants/COMMAND.py,sha256=ypGdEUmi8m9cdBZ3rDU6mb7bsIyu9ldCDvc4pNF7V70,680
|
pymysql/constants/COMMAND.py,sha256=ypGdEUmi8m9cdBZ3rDU6mb7bsIyu9ldCDvc4pNF7V70,680
|
||||||
pymysql/constants/CR.py,sha256=5ojVkbisyw7Qo_cTNpnHYvV6xHRZXK39Qqv8tjGbIbg,2228
|
pymysql/constants/CR.py,sha256=5ojVkbisyw7Qo_cTNpnHYvV6xHRZXK39Qqv8tjGbIbg,2228
|
||||||
pymysql/constants/ER.py,sha256=8q1PZOxezbXbRaPZrHrQebyLDx4CvAUkBArJ9xBuW0Y,12297
|
pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
|
||||||
pymysql/constants/FIELD_TYPE.py,sha256=yHZLSyQewMxTDx4PLrI1H_iwH2FnsrgBZFa56UG2HiQ,372
|
pymysql/constants/FIELD_TYPE.py,sha256=yHZLSyQewMxTDx4PLrI1H_iwH2FnsrgBZFa56UG2HiQ,372
|
||||||
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
|
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
|
||||||
pymysql/constants/SERVER_STATUS.py,sha256=KogVCOrV-S5aAFwyVKeKgua13nwdt1WFyHagjCZbcpM,334
|
pymysql/constants/SERVER_STATUS.py,sha256=KogVCOrV-S5aAFwyVKeKgua13nwdt1WFyHagjCZbcpM,334
|
||||||
@ -40,10 +41,10 @@ pymysql/constants/__pycache__/FIELD_TYPE.cpython-36.pyc,,
|
|||||||
pymysql/constants/__pycache__/FLAG.cpython-36.pyc,,
|
pymysql/constants/__pycache__/FLAG.cpython-36.pyc,,
|
||||||
pymysql/constants/__pycache__/SERVER_STATUS.cpython-36.pyc,,
|
pymysql/constants/__pycache__/SERVER_STATUS.cpython-36.pyc,,
|
||||||
pymysql/constants/__pycache__/__init__.cpython-36.pyc,,
|
pymysql/constants/__pycache__/__init__.cpython-36.pyc,,
|
||||||
pymysql/converters.py,sha256=BWHMbquNFUKfFXyZh6Qwch6mYLyYSQeaeifL4VLuISc,12235
|
pymysql/converters.py,sha256=kUT2KQdkqNTuSxzURVnQKS1ZcatoFTUfYe5b5QSJuRI,11055
|
||||||
pymysql/cursors.py,sha256=m6MhwWnm3CbTE4JAXzDuo6CYKC7W6JzsY4PN9eDmKJk,17238
|
pymysql/cursors.py,sha256=eiP_oTDi1MM5EYLHoecwbv5BXWJ1qEjfK8Uy3SjGEcs,16250
|
||||||
pymysql/err.py,sha256=PaXGLqOnDXJoeYjLbMZQE5UQ3MHFqiiHCzaDPP-_NJA,3716
|
pymysql/err.py,sha256=Vdrt2rVaSePVlB_uy0JNoeN6zYBt0_mM1UFDighLgNM,3734
|
||||||
pymysql/optionfile.py,sha256=4yW8A7aAR2Aild7ibLOCzIlTCcYd90PtR8LRGJSZs8o,658
|
pymysql/optionfile.py,sha256=4yW8A7aAR2Aild7ibLOCzIlTCcYd90PtR8LRGJSZs8o,658
|
||||||
pymysql/protocol.py,sha256=GH2yzGqPwqX2t2G87k3EJQt7bYQOLEN6QoN_m15c4Ak,12024
|
pymysql/protocol.py,sha256=9hAfVK-g4i53gHMoGj9QrPApywMYVM8oxGAuKb_-PXo,12071
|
||||||
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
|
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
|
||||||
pymysql/util.py,sha256=jKPts8cOMIXDndjsV3783VW-iq9uMxETWqfHP6Bd-Zo,180
|
pymysql/util.py,sha256=jKPts8cOMIXDndjsV3783VW-iq9uMxETWqfHP6Bd-Zo,180
|
@ -1,5 +1,5 @@
|
|||||||
Wheel-Version: 1.0
|
Wheel-Version: 1.0
|
||||||
Generator: bdist_wheel (0.33.6)
|
Generator: bdist_wheel (0.34.2)
|
||||||
Root-Is-Purelib: true
|
Root-Is-Purelib: true
|
||||||
Tag: py2-none-any
|
Tag: py2-none-any
|
||||||
Tag: py3-none-any
|
Tag: py3-none-any
|
Binary file not shown.
123
venv/Lib/site-packages/_distutils_hack/__init__.py
Normal file
123
venv/Lib/site-packages/_distutils_hack/__init__.py
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import importlib
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
|
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||||
|
|
||||||
|
|
||||||
|
def warn_distutils_present():
|
||||||
|
if 'distutils' not in sys.modules:
|
||||||
|
return
|
||||||
|
if is_pypy and sys.version_info < (3, 7):
|
||||||
|
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||||
|
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||||
|
return
|
||||||
|
warnings.warn(
|
||||||
|
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||||
|
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||||
|
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||||
|
"using distutils directly, ensure that setuptools is installed in the "
|
||||||
|
"traditional way (e.g. not an editable install), and/or make sure "
|
||||||
|
"that setuptools is always imported before distutils.")
|
||||||
|
|
||||||
|
|
||||||
|
def clear_distutils():
|
||||||
|
if 'distutils' not in sys.modules:
|
||||||
|
return
|
||||||
|
warnings.warn("Setuptools is replacing distutils.")
|
||||||
|
mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
|
||||||
|
for name in mods:
|
||||||
|
del sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
def enabled():
|
||||||
|
"""
|
||||||
|
Allow selection of distutils by environment variable.
|
||||||
|
"""
|
||||||
|
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
|
||||||
|
return which == 'local'
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_local_distutils():
|
||||||
|
clear_distutils()
|
||||||
|
distutils = importlib.import_module('setuptools._distutils')
|
||||||
|
distutils.__name__ = 'distutils'
|
||||||
|
sys.modules['distutils'] = distutils
|
||||||
|
|
||||||
|
# sanity check that submodules load as expected
|
||||||
|
core = importlib.import_module('distutils.core')
|
||||||
|
assert '_distutils' in core.__file__, core.__file__
|
||||||
|
|
||||||
|
|
||||||
|
def do_override():
|
||||||
|
"""
|
||||||
|
Ensure that the local copy of distutils is preferred over stdlib.
|
||||||
|
|
||||||
|
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||||
|
for more motivation.
|
||||||
|
"""
|
||||||
|
if enabled():
|
||||||
|
warn_distutils_present()
|
||||||
|
ensure_local_distutils()
|
||||||
|
|
||||||
|
|
||||||
|
class DistutilsMetaFinder:
|
||||||
|
def find_spec(self, fullname, path, target=None):
|
||||||
|
if path is not None:
|
||||||
|
return
|
||||||
|
|
||||||
|
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||||
|
method = getattr(self, method_name, lambda: None)
|
||||||
|
return method()
|
||||||
|
|
||||||
|
def spec_for_distutils(self):
|
||||||
|
import importlib.abc
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
class DistutilsLoader(importlib.abc.Loader):
|
||||||
|
|
||||||
|
def create_module(self, spec):
|
||||||
|
return importlib.import_module('setuptools._distutils')
|
||||||
|
|
||||||
|
def exec_module(self, module):
|
||||||
|
pass
|
||||||
|
|
||||||
|
return importlib.util.spec_from_loader('distutils', DistutilsLoader())
|
||||||
|
|
||||||
|
def spec_for_pip(self):
|
||||||
|
"""
|
||||||
|
Ensure stdlib distutils when running under pip.
|
||||||
|
See pypa/pip#8761 for rationale.
|
||||||
|
"""
|
||||||
|
if self.pip_imported_during_build():
|
||||||
|
return
|
||||||
|
clear_distutils()
|
||||||
|
self.spec_for_distutils = lambda: None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def pip_imported_during_build():
|
||||||
|
"""
|
||||||
|
Detect if pip is being imported in a build script. Ref #2355.
|
||||||
|
"""
|
||||||
|
import traceback
|
||||||
|
return any(
|
||||||
|
frame.f_globals['__file__'].endswith('setup.py')
|
||||||
|
for frame, line in traceback.walk_stack(None)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||||
|
|
||||||
|
|
||||||
|
def add_shim():
|
||||||
|
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_shim():
|
||||||
|
try:
|
||||||
|
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
1
venv/Lib/site-packages/_distutils_hack/override.py
Normal file
1
venv/Lib/site-packages/_distutils_hack/override.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
__import__('_distutils_hack').do_override()
|
@ -1,433 +0,0 @@
|
|||||||
Metadata-Version: 2.1
|
|
||||||
Name: aiohttp
|
|
||||||
Version: 3.5.4
|
|
||||||
Summary: Async http client/server framework (asyncio)
|
|
||||||
Home-page: https://github.com/aio-libs/aiohttp
|
|
||||||
Author: Nikolay Kim
|
|
||||||
Author-email: fafhrd91@gmail.com
|
|
||||||
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
|
|
||||||
Maintainer-email: aio-libs@googlegroups.com
|
|
||||||
License: Apache 2
|
|
||||||
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
|
|
||||||
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
|
|
||||||
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
|
|
||||||
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
|
|
||||||
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
|
|
||||||
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
|
|
||||||
Project-URL: Docs: RTD, https://docs.aiohttp.org
|
|
||||||
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
|
|
||||||
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: License :: OSI Approved :: Apache Software License
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.5
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: 3.7
|
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Operating System :: MacOS :: MacOS X
|
|
||||||
Classifier: Operating System :: Microsoft :: Windows
|
|
||||||
Classifier: Topic :: Internet :: WWW/HTTP
|
|
||||||
Classifier: Framework :: AsyncIO
|
|
||||||
Requires-Python: >=3.5.3
|
|
||||||
Requires-Dist: attrs (>=17.3.0)
|
|
||||||
Requires-Dist: chardet (<4.0,>=2.0)
|
|
||||||
Requires-Dist: multidict (<5.0,>=4.0)
|
|
||||||
Requires-Dist: async-timeout (<4.0,>=3.0)
|
|
||||||
Requires-Dist: yarl (<2.0,>=1.0)
|
|
||||||
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
|
|
||||||
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
|
|
||||||
Provides-Extra: speedups
|
|
||||||
Requires-Dist: aiodns ; extra == 'speedups'
|
|
||||||
Requires-Dist: brotlipy ; extra == 'speedups'
|
|
||||||
Requires-Dist: cchardet ; extra == 'speedups'
|
|
||||||
|
|
||||||
==================================
|
|
||||||
Async http client/server framework
|
|
||||||
==================================
|
|
||||||
|
|
||||||
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
|
|
||||||
:height: 64px
|
|
||||||
:width: 64px
|
|
||||||
:alt: aiohttp logo
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
||||||
.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
|
|
||||||
:target: https://travis-ci.com/aio-libs/aiohttp
|
|
||||||
:align: right
|
|
||||||
:alt: Travis status for master branch
|
|
||||||
|
|
||||||
.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
|
|
||||||
:target: https://ci.appveyor.com/project/aio-libs/aiohttp
|
|
||||||
:align: right
|
|
||||||
:alt: AppVeyor status for master branch
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
|
|
||||||
:target: https://codecov.io/gh/aio-libs/aiohttp
|
|
||||||
:alt: codecov.io status for master branch
|
|
||||||
|
|
||||||
.. image:: https://badge.fury.io/py/aiohttp.svg
|
|
||||||
:target: https://pypi.org/project/aiohttp
|
|
||||||
:alt: Latest PyPI package version
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
|
|
||||||
:target: https://docs.aiohttp.org/
|
|
||||||
:alt: Latest Read The Docs
|
|
||||||
|
|
||||||
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
|
||||||
:target: https://gitter.im/aio-libs/Lobby
|
|
||||||
:alt: Chat on Gitter
|
|
||||||
|
|
||||||
Key Features
|
|
||||||
============
|
|
||||||
|
|
||||||
- Supports both client and server side of HTTP protocol.
|
|
||||||
- Supports both client and server Web-Sockets out-of-the-box and avoids
|
|
||||||
Callback Hell.
|
|
||||||
- Provides Web-server with middlewares and pluggable routing.
|
|
||||||
|
|
||||||
|
|
||||||
Getting started
|
|
||||||
===============
|
|
||||||
|
|
||||||
Client
|
|
||||||
------
|
|
||||||
|
|
||||||
To get something from the web:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
async def fetch(session, url):
|
|
||||||
async with session.get(url) as response:
|
|
||||||
return await response.text()
|
|
||||||
|
|
||||||
async def main():
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
html = await fetch(session, 'http://python.org')
|
|
||||||
print(html)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
loop = asyncio.get_event_loop()
|
|
||||||
loop.run_until_complete(main())
|
|
||||||
|
|
||||||
|
|
||||||
Server
|
|
||||||
------
|
|
||||||
|
|
||||||
An example using a simple server:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# examples/server_simple.py
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
async def handle(request):
|
|
||||||
name = request.match_info.get('name', "Anonymous")
|
|
||||||
text = "Hello, " + name
|
|
||||||
return web.Response(text=text)
|
|
||||||
|
|
||||||
async def wshandle(request):
|
|
||||||
ws = web.WebSocketResponse()
|
|
||||||
await ws.prepare(request)
|
|
||||||
|
|
||||||
async for msg in ws:
|
|
||||||
if msg.type == web.WSMsgType.text:
|
|
||||||
await ws.send_str("Hello, {}".format(msg.data))
|
|
||||||
elif msg.type == web.WSMsgType.binary:
|
|
||||||
await ws.send_bytes(msg.data)
|
|
||||||
elif msg.type == web.WSMsgType.close:
|
|
||||||
break
|
|
||||||
|
|
||||||
return ws
|
|
||||||
|
|
||||||
|
|
||||||
app = web.Application()
|
|
||||||
app.add_routes([web.get('/', handle),
|
|
||||||
web.get('/echo', wshandle),
|
|
||||||
web.get('/{name}', handle)])
|
|
||||||
|
|
||||||
web.run_app(app)
|
|
||||||
|
|
||||||
|
|
||||||
Documentation
|
|
||||||
=============
|
|
||||||
|
|
||||||
https://aiohttp.readthedocs.io/
|
|
||||||
|
|
||||||
|
|
||||||
Demos
|
|
||||||
=====
|
|
||||||
|
|
||||||
https://github.com/aio-libs/aiohttp-demos
|
|
||||||
|
|
||||||
|
|
||||||
External links
|
|
||||||
==============
|
|
||||||
|
|
||||||
* `Third party libraries
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
|
|
||||||
* `Built with aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
|
|
||||||
* `Powered by aiohttp
|
|
||||||
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
|
|
||||||
|
|
||||||
Feel free to make a Pull Request for adding your link to these pages!
|
|
||||||
|
|
||||||
|
|
||||||
Communication channels
|
|
||||||
======================
|
|
||||||
|
|
||||||
*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs
|
|
||||||
|
|
||||||
Feel free to post your questions and ideas here.
|
|
||||||
|
|
||||||
*gitter chat* https://gitter.im/aio-libs/Lobby
|
|
||||||
|
|
||||||
We support `Stack Overflow
|
|
||||||
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
|
|
||||||
Please add *aiohttp* tag to your question there.
|
|
||||||
|
|
||||||
Requirements
|
|
||||||
============
|
|
||||||
|
|
||||||
- Python >= 3.5.3
|
|
||||||
- async-timeout_
|
|
||||||
- attrs_
|
|
||||||
- chardet_
|
|
||||||
- multidict_
|
|
||||||
- yarl_
|
|
||||||
|
|
||||||
Optionally you may install the cChardet_ and aiodns_ libraries (highly
|
|
||||||
recommended for sake of speed).
|
|
||||||
|
|
||||||
.. _chardet: https://pypi.python.org/pypi/chardet
|
|
||||||
.. _aiodns: https://pypi.python.org/pypi/aiodns
|
|
||||||
.. _attrs: https://github.com/python-attrs/attrs
|
|
||||||
.. _multidict: https://pypi.python.org/pypi/multidict
|
|
||||||
.. _yarl: https://pypi.python.org/pypi/yarl
|
|
||||||
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
|
|
||||||
.. _cChardet: https://pypi.python.org/pypi/cchardet
|
|
||||||
|
|
||||||
License
|
|
||||||
=======
|
|
||||||
|
|
||||||
``aiohttp`` is offered under the Apache 2 license.
|
|
||||||
|
|
||||||
|
|
||||||
Keepsafe
|
|
||||||
========
|
|
||||||
|
|
||||||
The aiohttp community would like to thank Keepsafe
|
|
||||||
(https://www.getkeepsafe.com) for its support in the early days of
|
|
||||||
the project.
|
|
||||||
|
|
||||||
|
|
||||||
Source code
|
|
||||||
===========
|
|
||||||
|
|
||||||
The latest developer version is available in a GitHub repository:
|
|
||||||
https://github.com/aio-libs/aiohttp
|
|
||||||
|
|
||||||
Benchmarks
|
|
||||||
==========
|
|
||||||
|
|
||||||
If you are interested in efficiency, the AsyncIO community maintains a
|
|
||||||
list of benchmarks on the official wiki:
|
|
||||||
https://github.com/python/asyncio/wiki/Benchmarks
|
|
||||||
|
|
||||||
=========
|
|
||||||
Changelog
|
|
||||||
=========
|
|
||||||
|
|
||||||
..
|
|
||||||
You should *NOT* be adding new change log entries to this file, this
|
|
||||||
file is managed by towncrier. You *may* edit previous change logs to
|
|
||||||
fix problems like typo corrections or such.
|
|
||||||
To add a new change log entry, please see
|
|
||||||
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
|
|
||||||
we named the news folder "changes".
|
|
||||||
|
|
||||||
WARNING: Don't drop the next directive!
|
|
||||||
|
|
||||||
.. towncrier release notes start
|
|
||||||
|
|
||||||
3.5.4 (2019-01-12)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
|
|
||||||
partial content only in case of compressed content
|
|
||||||
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
|
|
||||||
|
|
||||||
|
|
||||||
3.5.3 (2019-01-10)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of ``access_log=True`` and the event loop being in debug mode.
|
|
||||||
`#3504 <https://github.com/aio-libs/aiohttp/issues/3504>`_
|
|
||||||
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
|
|
||||||
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
|
|
||||||
- Send custom per-request cookies even if session jar is empty
|
|
||||||
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
|
|
||||||
- Restore Linux binary wheels publishing on PyPI
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.5.2 (2019-01-08)
|
|
||||||
==================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work with files asynchronously.
|
|
||||||
I/O based payloads from ``payload.py`` uses a ``ThreadPoolExecutor`` to work with I/O objects asynchronously.
|
|
||||||
`#3313 <https://github.com/aio-libs/aiohttp/issues/3313>`_
|
|
||||||
- Internal Server Errors in plain text if the browser does not support HTML.
|
|
||||||
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Preserve MultipartWriter parts headers on write.
|
|
||||||
|
|
||||||
Refactor the way how ``Payload.headers`` are handled. Payload instances now always
|
|
||||||
have headers and Content-Type defined.
|
|
||||||
|
|
||||||
Fix Payload Content-Disposition header reset after initial creation.
|
|
||||||
`#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
|
|
||||||
- Log suppressed exceptions in ``GunicornWebWorker``.
|
|
||||||
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
|
|
||||||
- Remove wildcard imports.
|
|
||||||
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
|
|
||||||
- Use the same task for app initialization and web server handling in gunicorn workers.
|
|
||||||
It allows to use Python3.7 context vars smoothly.
|
|
||||||
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
|
|
||||||
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed data
|
|
||||||
`#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
|
|
||||||
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to avoid a deprecation warning.
|
|
||||||
`#3480 <https://github.com/aio-libs/aiohttp/issues/3480>`_
|
|
||||||
- ``Payload.size`` type annotation changed from `Optional[float]` to `Optional[int]`.
|
|
||||||
`#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
|
|
||||||
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
|
|
||||||
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Add documentation for ``aiohttp.web.HTTPException``.
|
|
||||||
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
|
|
||||||
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
|
|
||||||
3.5.1 (2018-12-24)
|
|
||||||
====================
|
|
||||||
|
|
||||||
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
|
|
||||||
mode.
|
|
||||||
|
|
||||||
3.5.0 (2018-12-22)
|
|
||||||
====================
|
|
||||||
|
|
||||||
Features
|
|
||||||
--------
|
|
||||||
|
|
||||||
- The library type annotations are checked in strict mode now.
|
|
||||||
- Add support for setting cookies for individual request (`#2387 <https://github.com/aio-libs/aiohttp/pull/2387>`_)
|
|
||||||
- Application.add_domain implementation (`#2809 <https://github.com/aio-libs/aiohttp/pull/2809>`_)
|
|
||||||
- The default ``app`` in the request returned by ``test_utils.make_mocked_request``
|
|
||||||
can now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174 <https://github.com/aio-libs/aiohttp/pull/3174>`_)
|
|
||||||
- Make ``request.url`` accessible when transport is closed. (`#3177 <https://github.com/aio-libs/aiohttp/pull/3177>`_)
|
|
||||||
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression to run in a background executor to avoid blocking the main thread and potentially triggering health check failures. (`#3205 <https://github.com/aio-libs/aiohttp/pull/3205>`_)
|
|
||||||
- Enable users to set `ClientTimeout` in `aiohttp.request` (`#3213 <https://github.com/aio-libs/aiohttp/pull/3213>`_)
|
|
||||||
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc`` file
|
|
||||||
doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
|
|
||||||
- Add default logging handler to web.run_app
|
|
||||||
|
|
||||||
If the `Application.debug` flag is set and the default logger `aiohttp.access` is used, access logs will now be output using a `stderr` `StreamHandler` if no handlers are attached. Furthermore, if the default logger has no log level set, the log level will be set to `DEBUG`. (`#3324 <https://github.com/aio-libs/aiohttp/pull/3324>`_)
|
|
||||||
- Add method argument to ``session.ws_connect()``.
|
|
||||||
|
|
||||||
Sometimes server API requires a different HTTP method for WebSocket connection establishment.
|
|
||||||
|
|
||||||
For example, ``Docker exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
|
|
||||||
- Create a task per request handling. (`#3406 <https://github.com/aio-libs/aiohttp/pull/3406>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Bugfixes
|
|
||||||
--------
|
|
||||||
|
|
||||||
- Enable passing `access_log_class` via `handler_args` (`#3158 <https://github.com/aio-libs/aiohttp/pull/3158>`_)
|
|
||||||
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186 <https://github.com/aio-libs/aiohttp/pull/3186>`_)
|
|
||||||
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
|
|
||||||
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
|
|
||||||
- Don't uppercase HTTP method in parser (`#3233 <https://github.com/aio-libs/aiohttp/pull/3233>`_)
|
|
||||||
- Make method match regexp RFC-7230 compliant (`#3235 <https://github.com/aio-libs/aiohttp/pull/3235>`_)
|
|
||||||
- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
|
|
||||||
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239 <https://github.com/aio-libs/aiohttp/pull/3239>`_)
|
|
||||||
- Change imports from collections module in preparation for 3.8. (`#3258 <https://github.com/aio-libs/aiohttp/pull/3258>`_)
|
|
||||||
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265 <https://github.com/aio-libs/aiohttp/pull/3265>`_)
|
|
||||||
- Fix forward compatibility with Python 3.8: importing ABCs directly from the collections module will not be supported anymore. (`#3273 <https://github.com/aio-libs/aiohttp/pull/3273>`_)
|
|
||||||
- Keep the query string by `normalize_path_middleware`. (`#3278 <https://github.com/aio-libs/aiohttp/pull/3278>`_)
|
|
||||||
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290 <https://github.com/aio-libs/aiohttp/pull/3290>`_)
|
|
||||||
- Bracket IPv6 addresses in the HOST header (`#3304 <https://github.com/aio-libs/aiohttp/pull/3304>`_)
|
|
||||||
- Fix default message for server ping and pong frames. (`#3308 <https://github.com/aio-libs/aiohttp/pull/3308>`_)
|
|
||||||
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
|
|
||||||
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function (`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
|
|
||||||
- Release HTTP response before raising status exception (`#3364 <https://github.com/aio-libs/aiohttp/pull/3364>`_)
|
|
||||||
- Fix task cancellation when ``sendfile()`` syscall is used by static file handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
|
|
||||||
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
|
|
||||||
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
- Improve documentation of ``Application.make_handler`` parameters. (`#3152 <https://github.com/aio-libs/aiohttp/pull/3152>`_)
|
|
||||||
- Fix BaseRequest.raw_headers doc. (`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_)
|
|
||||||
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229 <https://github.com/aio-libs/aiohttp/pull/3229>`_)
|
|
||||||
- Make server access log format placeholder %b documentation reflect
|
|
||||||
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Deprecations and Removals
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
- Deprecate modification of ``session.requote_redirect_url`` (`#2278 <https://github.com/aio-libs/aiohttp/pull/2278>`_)
|
|
||||||
- Deprecate ``stream.unread_data()`` (`#3260 <https://github.com/aio-libs/aiohttp/pull/3260>`_)
|
|
||||||
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318 <https://github.com/aio-libs/aiohttp/pull/3318>`_)
|
|
||||||
- Encourage creation of aiohttp public objects inside a coroutine (`#3331 <https://github.com/aio-libs/aiohttp/pull/3331>`_)
|
|
||||||
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
|
|
||||||
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
|
|
||||||
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop`` properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
|
|
||||||
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381 <https://github.com/aio-libs/aiohttp/pull/3381>`_)
|
|
||||||
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385 <https://github.com/aio-libs/aiohttp/pull/3385>`_)
|
|
||||||
- Deprecate bare connector close, use ``async with connector:`` and ``await connector.close()`` instead. (`#3417 <https://github.com/aio-libs/aiohttp/pull/3417>`_)
|
|
||||||
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession`` constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
|
|
||||||
|
|
||||||
|
|
||||||
Misc
|
|
||||||
----
|
|
||||||
|
|
||||||
- #3341, #3351
|
|
||||||
|
|
@ -1,124 +0,0 @@
|
|||||||
aiohttp-3.5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
|
||||||
aiohttp-3.5.4.dist-info/LICENSE.txt,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332
|
|
||||||
aiohttp-3.5.4.dist-info/METADATA,sha256=vpBjLbRZ9Tbi4DEj6aDUlGbj-HJPHa8Wihktdh4Z9U0,16950
|
|
||||||
aiohttp-3.5.4.dist-info/RECORD,,
|
|
||||||
aiohttp-3.5.4.dist-info/WHEEL,sha256=NGBd8VpwAMkmGKuUIURJaMXNI6PaAo_v5wy1RzFtRN4,106
|
|
||||||
aiohttp-3.5.4.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
|
||||||
aiohttp/__init__.py,sha256=GdkiBfeUQa38wScKQYLH4mRL-20MqARHC0ljV6Naf8w,4948
|
|
||||||
aiohttp/__pycache__/__init__.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/abc.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/client.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/connector.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/formdata.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/helpers.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/http.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/locks.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/log.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/multipart.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/payload.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/resolver.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/signals.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/streams.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/tracing.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_app.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_log.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_request.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_response.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_server.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
|
|
||||||
aiohttp/__pycache__/worker.cpython-36.pyc,,
|
|
||||||
aiohttp/_cparser.pxd,sha256=tgw30SL6kQSczzGMlMhx2Cuhf_O8P8ZPimVCb85xILc,3959
|
|
||||||
aiohttp/_find_header.c,sha256=lWc5w3UZiVd3ni60DuFDSSPzsaQUhAQcERDGBOqeML8,189932
|
|
||||||
aiohttp/_find_header.h,sha256=5oOgQ85nF6V7rpU8NhyE5vyGkTo1Cgf1GIYrtxSTzQI,170
|
|
||||||
aiohttp/_find_header.pxd,sha256=0GfwFCPN2zxEKTO1_MA5sYq2UfzsG8kcV3aTqvwlz3g,68
|
|
||||||
aiohttp/_frozenlist.c,sha256=TGMrV7V3PPs6wAktCttiCr_8p2Qn8uRQb7hFFag9x6A,287339
|
|
||||||
aiohttp/_frozenlist.cp36-win_amd64.pyd,sha256=9JUCj2BYRIghlufCf0SAmKzWsu4js27eI0xZLUfrVEw,61440
|
|
||||||
aiohttp/_frozenlist.pyx,sha256=BD8LcERExsWdo4qzuuQ84f-L_pHVzkUQO0lEAOe3Fog,2605
|
|
||||||
aiohttp/_headers.pxi,sha256=XgJL5FQRwL4uZQfegYShPclsErUlvG_xuMHs7dp_2-o,2027
|
|
||||||
aiohttp/_helpers.c,sha256=Kf2ro8IvT0TTxfB_P1yPBEZXYi597J9-Pc6a1HW_m34,207049
|
|
||||||
aiohttp/_helpers.cp36-win_amd64.pyd,sha256=qd3DgclMuKf3cMVTEusoYVVdutfgTiYNxSVp0WVvQ8k,46080
|
|
||||||
aiohttp/_helpers.pyi,sha256=mJRb5YdG8DxYbPfVddGRGmi93qqaJM30L1qFpgSKQuA,204
|
|
||||||
aiohttp/_helpers.pyx,sha256=XeLbNft5X_4ifi8QB8i6TyrRuayijMSO3IDHeSA89uM,1049
|
|
||||||
aiohttp/_http_parser.c,sha256=RsN44f-VDf-s6Pdhpeowlqu_tQK6qieHVdM9JoWB_No,994100
|
|
||||||
aiohttp/_http_parser.cp36-win_amd64.pyd,sha256=bmU4boNpDPaQkrImibWgMwJDyUsLH1LqcZzGY3K7NbA,270336
|
|
||||||
aiohttp/_http_parser.pyx,sha256=qAeXR88_UXU2ontoLIq7hg7M2KHjY982iJeH_u7aXXs,28672
|
|
||||||
aiohttp/_http_writer.c,sha256=koVxrMta9W_hOkYK_8S4UD2zjnZo_T-umufxwePAhTc,205822
|
|
||||||
aiohttp/_http_writer.cp36-win_amd64.pyd,sha256=i-iBsVTcStCakBdONEnWMM6QMbRIhFEYTYNRDplP-8s,38912
|
|
||||||
aiohttp/_http_writer.pyx,sha256=vnanyXytNqyi6oqxELg5ARJ8LhtB8mDGxNfz6DdvH6E,4193
|
|
||||||
aiohttp/_websocket.c,sha256=JVNi7H1-CxOO1Upw5b9b_IA1IKLp9B95_H8gZhhNkvg,135136
|
|
||||||
aiohttp/_websocket.cp36-win_amd64.pyd,sha256=RrJYhQnL14kjT7Q5GvqsoVAtdpg683K52S_gZ0lcOWc,27648
|
|
||||||
aiohttp/_websocket.pyx,sha256=tJfygcVwKF_Xb6Pg48a6t50YO2xY4Rg0Wj7LcJJMi-U,1559
|
|
||||||
aiohttp/abc.py,sha256=lsf2bz-9KtqLhtI-e-tmgp3ynziMypYyEHvwOnFg7lQ,5392
|
|
||||||
aiohttp/base_protocol.py,sha256=kv6AbDw8ZQOyB9Hm2bOaPZyXcAbUUwFOO2lbAmArpfw,2644
|
|
||||||
aiohttp/client.py,sha256=hXh0WgGqhl80gVDlkuzgrHVaCxxkg_A9_mrhOkdSb-s,42549
|
|
||||||
aiohttp/client_exceptions.py,sha256=3e7SWwDXDhUO5npOhwgdL6K8tXMTdVyv58rjQboY4Yo,7547
|
|
||||||
aiohttp/client_proto.py,sha256=l1bLzhVx8hHOuS8nBPH6wNU15S-P6z_OMtpx_tPRi54,8001
|
|
||||||
aiohttp/client_reqrep.py,sha256=LUhjuCGyJs55LcH_Sr3AMcAhS1XlcCPM73rc8C3_GV0,35793
|
|
||||||
aiohttp/client_ws.py,sha256=AQlj-peBA0mGyra1t38sWlfV28MEM0SAATRXp1TsF9I,10694
|
|
||||||
aiohttp/connector.py,sha256=AORmJFz8WLuAjca5O582FKCC74f6emuXdZfhWzvPpx4,39556
|
|
||||||
aiohttp/cookiejar.py,sha256=ghkcBC9JhqKFz3InpJ4l2_stXLVv6qORX1303vepQUI,11268
|
|
||||||
aiohttp/formdata.py,sha256=VZCo9kmDb50lQUcRMDfAH3d5lnRxBq_AX38ge8vFI00,5807
|
|
||||||
aiohttp/frozenlist.py,sha256=I4zR368wRHXp402Z3f5lhd5i48b6A66MhHncW1JGkb4,1781
|
|
||||||
aiohttp/frozenlist.pyi,sha256=fkQEKqDR6nOjXDx2cXvfCcetoMQQdzjXs2uoA7uVaP4,1431
|
|
||||||
aiohttp/hdrs.py,sha256=iaXnHXOR_Dx0rvVkvmIZhc-7Egf2ByuSDI9tqskS0kQ,3449
|
|
||||||
aiohttp/helpers.py,sha256=q_AZMU7hOJBvtTklhQpwa1DTH3uR5h2ZA0vLlsVGSQs,22633
|
|
||||||
aiohttp/http.py,sha256=mYXbwDI8bF9D1RShF0EGtVTx7OgIyksbmKR4b_4RgBo,1385
|
|
||||||
aiohttp/http_exceptions.py,sha256=yb2XryY_kktgiADcYn1nS0Dm-RVhhy0J6R0qfg-JyWo,2358
|
|
||||||
aiohttp/http_parser.py,sha256=v9csKsBv-rmOir1ikRBcDJDAaPMsFen1HoP8_Viz6xE,27912
|
|
||||||
aiohttp/http_websocket.py,sha256=GpysCWVOOQyRzvLSq0IHhVG0goWSnv5Rmwf91uUwowI,24594
|
|
||||||
aiohttp/http_writer.py,sha256=XhGCqy_lzdLyxIzjQ_ufPFfJKTTWx1sb6YZWvrOFUPA,5239
|
|
||||||
aiohttp/locks.py,sha256=l-cW8wUbIkHaovghT7gpY8Yp5Vlo-u2G7_CR5xQqEQ8,1234
|
|
||||||
aiohttp/log.py,sha256=kOWU6EcyBQESISm27vc8dVEz_h9zxozLa5WCya1RzhQ,325
|
|
||||||
aiohttp/multipart.py,sha256=h76ZKaEdP2moxWK0qNydR7zYMgGMoyqkkRssTmrtx1A,32277
|
|
||||||
aiohttp/payload.py,sha256=QjzdcLJ89GGqFSN_SdMgEvw_Id4UEXZ9mL_2fAGF4gk,14027
|
|
||||||
aiohttp/payload_streamer.py,sha256=ZNWaWwAxOIricwfjH4-YrkCqehowVizM6fJ_JVDR480,2103
|
|
||||||
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
|
|
||||||
aiohttp/pytest_plugin.py,sha256=8KOUt8KXu_3NkPQ8DYwgqKfdAvVZ--zHnm0EQiKFPkI,10332
|
|
||||||
aiohttp/resolver.py,sha256=pRF91jOjTNuCll5TMRjTe1OxnGZK4wjAggYLgvzXkGQ,3626
|
|
||||||
aiohttp/signals.py,sha256=_ge2XQXBDWHoyCI4E-nXC-sOEJGVrJm0zYGHH0E5woQ,948
|
|
||||||
aiohttp/signals.pyi,sha256=mrEA9Ve08W22L_yI8_F7PkdQUjid_VsL3o9tcC0Ud0E,325
|
|
||||||
aiohttp/streams.py,sha256=i1Q7_RzolpEQ63AkalkeeSHsMPOaHAfjnwlxvRmYi-k,20371
|
|
||||||
aiohttp/tcp_helpers.py,sha256=1WVYM2C-HZQpgcksTyadRsl2_WeuXh_ECUxCcwji5d8,1631
|
|
||||||
aiohttp/test_utils.py,sha256=0a0034sQM72grdRxjTnYpHtkUvMwstshfc9jVPXsZ1U,20525
|
|
||||||
aiohttp/tracing.py,sha256=yfOJWzRQgRdDcdjsDLqPul3anYyVFhztDeyoM01oIq8,12662
|
|
||||||
aiohttp/typedefs.py,sha256=6HXEWJNZGUuNewFQUjSkCzKP8rQVZSKqfdNnIgofZWs,1259
|
|
||||||
aiohttp/web.py,sha256=2edP5uK2BU6wTXAWzGp2lgYq_CyU3vzLaQa0I_Ehg_0,15121
|
|
||||||
aiohttp/web_app.py,sha256=vKuHVhH9d-Qg5Pg1A8MbaZPeJttkSsghpuo2JYvUJks,17212
|
|
||||||
aiohttp/web_exceptions.py,sha256=-CQI325lMa9W-1WeJ2RlHApOOQ74ctHd6OyeKG_EyT4,10079
|
|
||||||
aiohttp/web_fileresponse.py,sha256=0Oln1kTqD80EhftG2jqVbsuSLr0Gbjpuk4T3D06fFjk,12712
|
|
||||||
aiohttp/web_log.py,sha256=J33FXqV36hWcyk8YfFNXDj3SI40uoOQzEX2Fhni7bzc,8269
|
|
||||||
aiohttp/web_middlewares.py,sha256=BY05dLo9rsRZttRmjDUHEokiHQLzW_ffENZL9q-Grf4,4188
|
|
||||||
aiohttp/web_protocol.py,sha256=q0zEVHMSLdmUw_KdI6zVeOj_k3lLZWMj4PJHo8h9c54,21394
|
|
||||||
aiohttp/web_request.py,sha256=M8ARRuEso-V7G675-xWY-lqLBGDmBVRGPujaufKZGuo,25234
|
|
||||||
aiohttp/web_response.py,sha256=nmldFBqLLaCECoaYUw54-2BVHB6Xz6XgGMK0O5ymrjo,25511
|
|
||||||
aiohttp/web_routedef.py,sha256=jQ8Y0hDHYuMBTtsuo17qjkQLBMoacbkh4zaUdwSJJ8s,6077
|
|
||||||
aiohttp/web_runner.py,sha256=_LUDpAc6vDOWfNJ-DBj3NZPtID0gBPH6JeMXtGSt4OU,10088
|
|
||||||
aiohttp/web_server.py,sha256=527MjryEIqWArFHMJlEABg3TcZgYtyJIFHY19Yvf3AI,2165
|
|
||||||
aiohttp/web_urldispatcher.py,sha256=x-O0Tqxn6xqMdQ5Qrg0hxIli-DbOfxLEDpgX_j_FGQU,38788
|
|
||||||
aiohttp/web_ws.py,sha256=7UpGsVFZw_YtpJOWPLeDnGmL6PtirxAkc8r-pGUQbt0,17082
|
|
||||||
aiohttp/worker.py,sha256=hekSLWLEJVrHrIrZ3dQga7Jzgtx_Cf3ZW7Zfd1J1G3A,8178
|
|
661
venv/Lib/site-packages/aiohttp-3.6.3.dist-info/METADATA
Normal file
661
venv/Lib/site-packages/aiohttp-3.6.3.dist-info/METADATA
Normal file
@ -0,0 +1,661 @@
|
|||||||
|
Metadata-Version: 2.1
Name: aiohttp
Version: 3.6.3
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/aio-libs/aiohttp
Project-URL: CI: Circle, https://circleci.com/gh/aio-libs/aiohttp
Project-URL: CI: Shippable, https://app.shippable.com/github/aio-libs/aiohttp
Project-URL: CI: Travis, https://travis-ci.com/aio-libs/aiohttp
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.5.3
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<4.0,>=2.0)
Requires-Dist: multidict (<5.0,>=4.5)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<1.6.0,>=1.0)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Requires-Dist: typing-extensions (>=3.6.5) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'

==================================
Async http client/server framework
==================================

.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

.. image:: https://travis-ci.com/aio-libs/aiohttp.svg?branch=master
   :target: https://travis-ci.com/aio-libs/aiohttp
   :align: right
   :alt: Travis status for master branch

.. image:: https://ci.appveyor.com/api/projects/status/tnddy9k6pphl8w7k/branch/master?svg=true
   :target: https://ci.appveyor.com/project/aio-libs/aiohttp
   :align: right
   :alt: AppVeyor status for master branch

.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiohttp
   :alt: codecov.io status for master branch

.. image:: https://badge.fury.io/py/aiohttp.svg
   :target: https://pypi.org/project/aiohttp
   :alt: Latest PyPI package version

.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter

Key Features
============

- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
  Callback Hell.
- Provides Web-server with middlewares and pluggable routing.


Getting started
===============

Client
------

To get something from the web:

.. code-block:: python

    import aiohttp
    import asyncio

    async def fetch(session, url):
        async with session.get(url) as response:
            return await response.text()

    async def main():
        async with aiohttp.ClientSession() as session:
            html = await fetch(session, 'http://python.org')
            print(html)

    if __name__ == '__main__':
        loop = asyncio.get_event_loop()
        loop.run_until_complete(main())

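The example above drives the coroutine with the pre-3.7 event-loop API. On
Python 3.7 and newer the same client can be started with ``asyncio.run``
instead; a minimal sketch of the equivalent entry point (an illustrative
addition, not part of the packaged README):

.. code-block:: python

    # Equivalent entry point on Python 3.7+; aiohttp itself still supports 3.5,
    # which is why the packaged example uses get_event_loop().
    import asyncio

    if __name__ == '__main__':
        asyncio.run(main())
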
Server
------

An example using a simple server:

.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws

    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    if __name__ == '__main__':
        web.run_app(app)

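The feature list above also mentions middlewares and pluggable routing, which
the server example does not show. A minimal middleware sketch (an illustrative
addition, not taken from the packaged README) that stamps every response with
an extra header:

.. code-block:: python

    from aiohttp import web

    @web.middleware
    async def add_server_header(request, handler):
        # Runs around every registered request handler.
        response = await handler(request)
        response.headers['X-Served-By'] = 'aiohttp'
        return response

    async def hello(request):
        return web.Response(text="ok")

    app = web.Application(middlewares=[add_server_header])
    app.add_routes([web.get('/', hello)])
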
Documentation
=============

https://aiohttp.readthedocs.io/


Demos
=====

https://github.com/aio-libs/aiohttp-demos


External links
==============

* `Third party libraries
  <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
  <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
  <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_

Feel free to make a Pull Request for adding your link to these pages!


Communication channels
======================

*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs

Feel free to post your questions and ideas here.

*gitter chat* https://gitter.im/aio-libs/Lobby

We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add *aiohttp* tag to your question there.


Requirements
============

- Python >= 3.5.3
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_

Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for sake of speed).

.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet

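The optional libraries mentioned above correspond to the ``speedups`` extra
declared in the package metadata earlier in this file. As a rough illustration
of what aiodns buys, the client can be pointed at the aiodns-backed resolver
explicitly; a minimal sketch (assumes aiodns is installed, otherwise
``AsyncResolver`` raises at construction):

.. code-block:: python

    import aiohttp

    async def make_session():
        # AsyncResolver delegates DNS lookups to aiodns instead of the
        # default threaded resolver.
        resolver = aiohttp.AsyncResolver()
        connector = aiohttp.TCPConnector(resolver=resolver)
        return aiohttp.ClientSession(connector=connector)
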
License
=======

``aiohttp`` is offered under the Apache 2 license.


Keepsafe
========

The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.


Source code
===========

The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp


Benchmarks
==========

If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks

=========
|
||||||
|
Changelog
|
||||||
|
=========
|
||||||
|
|
||||||
|
..
|
||||||
|
You should *NOT* be adding new change log entries to this file, this
|
||||||
|
file is managed by towncrier. You *may* edit previous change logs to
|
||||||
|
fix problems like typo corrections or such.
|
||||||
|
To add a new change log entry, please see
|
||||||
|
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
|
||||||
|
we named the news folder "changes".
|
||||||
|
|
||||||
|
WARNING: Don't drop the next directive!
|
||||||
|
|
||||||
|
.. towncrier release notes start
|
||||||
|
|
||||||
|
3.6.3 (2020-10-12)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
|
||||||
|
release.
|
||||||
|
|
||||||
|
3.6.2 (2019-10-09)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Made exceptions pickleable. Also changed the repr of some exceptions.
|
||||||
|
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
|
||||||
|
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
|
||||||
|
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Reset the ``sock_read`` timeout each time data is received for a
|
||||||
|
``aiohttp.ClientResponse``. `#3808
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3808>`_
|
||||||
|
- Fix handling of expired cookies so they are not stored in CookieJar.
|
||||||
|
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
|
||||||
|
- Fix misleading message in the string representation of ``ClientConnectorError``;
|
||||||
|
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/4097>`_
|
||||||
|
- Don't clobber HTTP status when using FileResponse.
|
||||||
|
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Added minimal required logging configuration to logging documentation.
|
||||||
|
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
|
||||||
|
- Update docs to reflect proxy support.
|
||||||
|
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
|
||||||
|
- Fix typo in code example in testing docs.
|
||||||
|
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
|
||||||
|
|
||||||
|
|
||||||
|
Misc
|
||||||
|
----
|
||||||
|
|
||||||
|
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.6.1 (2019-09-19)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Compatibility with Python 3.8.
|
||||||
|
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- correct some exception string format
|
||||||
|
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
|
||||||
|
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
|
||||||
|
unavailable because the runtime is built against
|
||||||
|
an outdated OpenSSL.
|
||||||
|
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
|
||||||
|
- Update multidict requirement to >= 4.5
|
||||||
|
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
|
||||||
|
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.6.0 (2019-09-06)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
|
||||||
|
Proactor event loop to work. `#3629
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3629>`_
|
||||||
|
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
|
||||||
|
compatible with more http proxy servers. `#3798
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3798>`_
|
||||||
|
- Accept non-GET request for starting websocket handshake on server side.
|
||||||
|
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Raise a ClientResponseError instead of an AssertionError for a blank
|
||||||
|
HTTP Reason Phrase.
|
||||||
|
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
|
||||||
|
- Fix an issue where cookies would sometimes not be set during a redirect.
|
||||||
|
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
|
||||||
|
- Change normalize_path_middleware to use 308 redirect instead of 301.
|
||||||
|
|
||||||
|
This behavior should prevent clients from being unable to use PUT/POST
|
||||||
|
methods on endpoints that are redirected because of a trailing slash.
|
||||||
|
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
|
||||||
|
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
|
||||||
|
task with unhandled exception when the server is used in conjunction with
|
||||||
|
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
|
||||||
|
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
|
||||||
|
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
|
||||||
|
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
|
||||||
|
- Use sanitized URL as Location header in redirects
|
||||||
|
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
|
||||||
|
- Improve typing annotations for multipart.py along with changes required
|
||||||
|
by mypy in files that references multipart.py.
|
||||||
|
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
|
||||||
|
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
|
||||||
|
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
|
||||||
|
- Cleanup per-chunk data in generic data read. Memory leak fixed.
|
||||||
|
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
|
||||||
|
- Use correct type for add_view and family
|
||||||
|
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
|
||||||
|
- Fix _keepalive field in __slots__ of ``RequestHandler``.
|
||||||
|
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
|
||||||
|
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
|
||||||
|
transport" exception when clients disconnect uncleanly.
|
||||||
|
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
|
||||||
|
- Suppress pytest warnings due to ``test_utils`` classes
|
||||||
|
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
|
||||||
|
- Fix overshadowing of overlapped sub-application prefixes.
|
||||||
|
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
|
||||||
|
- Fixed return type annotation for WSMessage.json()
|
||||||
|
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
|
||||||
|
- Properly expose TooManyRedirects publicly as documented.
|
||||||
|
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
|
||||||
|
- Fix missing brackets for IPv6 in proxy CONNECT request
|
||||||
|
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
|
||||||
|
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
|
||||||
|
``asyncio.ClientSession.request`` according to the docs `#3852
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3852>`_
|
||||||
|
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
|
||||||
|
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
|
||||||
|
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
|
||||||
|
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
|
||||||
|
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
|
||||||
|
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
|
||||||
|
- Add URL to the string representation of ClientResponseError.
|
||||||
|
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
|
||||||
|
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
|
||||||
|
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
|
||||||
|
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
|
||||||
|
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
|
||||||
|
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
|
||||||
|
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
|
||||||
|
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
|
||||||
|
- use ``if __name__ == '__main__':`` in server examples.
|
||||||
|
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
|
||||||
|
- Update documentation reference to the default access logger.
|
||||||
|
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
|
||||||
|
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
|
||||||
|
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
|
||||||
|
- Removed deprecation warning in tracing example docs
|
||||||
|
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.5.4 (2019-01-12)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
|
||||||
|
partial content only in case of compressed content
|
||||||
|
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
|
||||||
|
|
||||||
|
|
||||||
|
3.5.3 (2019-01-10)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
|
||||||
|
``access_log=True`` and the event loop being in debug mode. `#3504
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3504>`_
|
||||||
|
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
|
||||||
|
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
|
||||||
|
- Send custom per-request cookies even if session jar is empty
|
||||||
|
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
|
||||||
|
- Restore Linux binary wheels publishing on PyPI
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.5.2 (2019-01-08)
|
||||||
|
==================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
|
||||||
|
with files asynchronously. I/O based payloads from ``payload.py`` uses a
|
||||||
|
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3313>`_
|
||||||
|
- Internal Server Errors in plain text if the browser does not support HTML.
|
||||||
|
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Preserve MultipartWriter parts headers on write. Refactor the way how
|
||||||
|
``Payload.headers`` are handled. Payload instances now always have headers and
|
||||||
|
Content-Type defined. Fix Payload Content-Disposition header reset after initial
|
||||||
|
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
|
||||||
|
- Log suppressed exceptions in ``GunicornWebWorker``.
|
||||||
|
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
|
||||||
|
- Remove wildcard imports.
|
||||||
|
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
|
||||||
|
- Use the same task for app initialization and web server handling in gunicorn workers.
|
||||||
|
It allows to use Python3.7 context vars smoothly.
|
||||||
|
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
|
||||||
|
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
|
||||||
|
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
|
||||||
|
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
|
||||||
|
avoid a deprecation warning. `#3480
|
||||||
|
<https://github.com/aio-libs/aiohttp/issues/3480>`_
|
||||||
|
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
|
||||||
|
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
|
||||||
|
- Ignore done tasks when cancels pending activities on ``web.run_app`` finalization.
|
||||||
|
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Add documentation for ``aiohttp.web.HTTPException``.
|
||||||
|
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
|
||||||
|
|
||||||
|
|
||||||
|
Misc
|
||||||
|
----
|
||||||
|
|
||||||
|
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
|
||||||
|
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
|
||||||
|
3.5.1 (2018-12-24)
|
||||||
|
====================
|
||||||
|
|
||||||
|
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
|
||||||
|
mode.
|
||||||
|
|
||||||
|
3.5.0 (2018-12-22)
|
||||||
|
====================
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
- The library type annotations are checked in strict mode now.
|
||||||
|
- Add support for setting cookies for individual request (`#2387
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
|
||||||
|
- Application.add_domain implementation (`#2809
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
|
||||||
|
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
|
||||||
|
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
|
||||||
|
- Make ``request.url`` accessible when transport is closed. (`#3177
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
|
||||||
|
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
|
||||||
|
to run in a background executor to avoid blocking the main thread and potentially
|
||||||
|
triggering health check failures. (`#3205
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
|
||||||
|
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
|
||||||
|
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
|
||||||
|
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
|
||||||
|
- Add default logging handler to web.run_app If the ``Application.debug``` flag is set
|
||||||
|
and the default logger ``aiohttp.access`` is used, access logs will now be output
|
||||||
|
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
|
||||||
|
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
|
||||||
|
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
|
||||||
|
different HTTP method for WebSocket connection establishment. For example, ``Docker
|
||||||
|
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
|
||||||
|
- Create a task per request handling. (`#3406
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
|
||||||
|
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
|
||||||
|
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
|
||||||
|
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
|
||||||
|
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
|
||||||
|
- Don't uppercase HTTP method in parser (`#3233
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
|
||||||
|
- Make method match regexp RFC-7230 compliant (`#3235
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
|
||||||
|
- Add ``app.pre_frozen`` state to properly handle startup signals in
|
||||||
|
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
|
||||||
|
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
|
||||||
|
- Change imports from collections module in preparation for 3.8. (`#3258
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
|
||||||
|
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
|
||||||
|
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
|
||||||
|
collections module will not be supported anymore. (`#3273
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
|
||||||
|
- Keep the query string by ``normalize_path_middleware``. (`#3278
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
|
||||||
|
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
|
||||||
|
- Bracket IPv6 addresses in the HOST header (`#3304
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
|
||||||
|
- Fix default message for server ping and pong frames. (`#3308
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
|
||||||
|
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
|
||||||
|
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
|
||||||
|
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
|
||||||
|
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
|
||||||
|
- Release HTTP response before raising status exception (`#3364
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
|
||||||
|
- Fix task cancellation when ``sendfile()`` syscall is used by static file
|
||||||
|
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
|
||||||
|
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
|
||||||
|
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
|
||||||
|
|
||||||
|
|
||||||
|
Improved Documentation
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
|
||||||
|
- Fix BaseRequest.raw_headers doc. (`#3215
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
|
||||||
|
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
|
||||||
|
- Make server access log format placeholder %b documentation reflect
|
||||||
|
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
|
||||||
|
|
||||||
|
|
||||||
|
Deprecations and Removals
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
|
||||||
|
- Deprecate ``stream.unread_data()`` (`#3260
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
|
||||||
|
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
|
||||||
|
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
|
||||||
|
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
|
||||||
|
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
|
||||||
|
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
|
||||||
|
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
|
||||||
|
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
|
||||||
|
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
|
||||||
|
- Deprecate bare connector close, use ``async with connector:`` and ``await
|
||||||
|
connector.close()`` instead. (`#3417
|
||||||
|
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
|
||||||
|
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
|
||||||
|
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
|
||||||
|
|
||||||
|
|
||||||
|
Misc
|
||||||
|
----
|
||||||
|
|
||||||
|
- #3341, #3351
|
||||||
|
|
124
venv/Lib/site-packages/aiohttp-3.6.3.dist-info/RECORD
Normal file
@ -0,0 +1,124 @@
aiohttp-3.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
aiohttp-3.6.3.dist-info/LICENSE.txt,sha256=atcq6P9K6Td0Wq4oBfNDqYf6o6YGrHLGCfLUj3GZspQ,11533
|
||||||
|
aiohttp-3.6.3.dist-info/METADATA,sha256=UmObIpkmeVqZFB1rwDUEbIzmKMrVaEaEfDZulA48d_g,24570
|
||||||
|
aiohttp-3.6.3.dist-info/RECORD,,
|
||||||
|
aiohttp-3.6.3.dist-info/WHEEL,sha256=SktxJNdVgf2GQ0Rnj47KhGrFXFhp4j6ROtWy0UXB1eM,106
|
||||||
|
aiohttp-3.6.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
|
||||||
|
aiohttp/__init__.py,sha256=LagwdvgQ1g_JFcWIC-PjM8StM1gLulnzbjJ1UncMXoI,8427
|
||||||
|
aiohttp/__pycache__/__init__.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/abc.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/connector.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/formdata.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/helpers.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/locks.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/log.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/multipart.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/payload.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/resolver.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/signals.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/streams.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/tracing.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_app.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_log.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_request.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_response.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_server.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
|
||||||
|
aiohttp/__pycache__/worker.cpython-36.pyc,,
|
||||||
|
aiohttp/_cparser.pxd,sha256=xvsLl13ZXXyHGyb2Us7WsLncndQrxhyGB4KXnvbsRtQ,4099
|
||||||
|
aiohttp/_find_header.c,sha256=MOZn07_ot-UcOdQBpYAWQmyigqLvMwkqa_7l4M7D1dI,199932
|
||||||
|
aiohttp/_find_header.h,sha256=HistyxY7K3xEJ53Y5xEfwrDVDkfcV0zQ9mkzMgzi_jo,184
|
||||||
|
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
|
||||||
|
aiohttp/_frozenlist.c,sha256=-vfgzV6cNjUykuqt1kkWDiT2U92BR2zhL9b9yDiiodg,288943
|
||||||
|
aiohttp/_frozenlist.cp36-win_amd64.pyd,sha256=ChZp9r6iFxKYd5M_zgBVSnfuG69razOy5oZNxQ_mfyo,69120
|
||||||
|
aiohttp/_frozenlist.pyx,sha256=SB851KmtWpiJ2ZB05Tpo4855VkCyRtgMs843Wz8kFeg,2713
|
||||||
|
aiohttp/_headers.pxi,sha256=PxiakDsuEs0O94eHRlPcerO24TqPAxc0BtX86XZL4gw,2111
|
||||||
|
aiohttp/_helpers.c,sha256=sQcHpEGAX3jEvA8jujh4_D_fev9cRjMAc5CySqtHYrg,208657
|
||||||
|
aiohttp/_helpers.cp36-win_amd64.pyd,sha256=vaRlMUUMLYsjE0c8OyK-IYIhYr3U-V6_2O3PJCrpcfc,50176
|
||||||
|
aiohttp/_helpers.pyi,sha256=C6Q4W8EwElvD1gF1siRGMVG7evEX8fWWstZHL1BbsDA,212
|
||||||
|
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
|
||||||
|
aiohttp/_http_parser.c,sha256=W1sETtDrrBdnBiSOpqaDcO9DcE9zhyLjPTq4WKIK0bc,997494
|
||||||
|
aiohttp/_http_parser.cp36-win_amd64.pyd,sha256=GQaatWLpRa9ldsKnE7Dg20DkDJZBjsg_v0fbSR2YUo0,248320
|
||||||
|
aiohttp/_http_parser.pyx,sha256=C2XxooYRput7XPQzbaGMDrtvJtmhWa58SDPytyuAwGk,29577
|
||||||
|
aiohttp/_http_writer.c,sha256=-wuBZwiaUXEy1Zj-R5BD5igH7cUg_CYb5ZvYMsh8vzo,211620
|
||||||
|
aiohttp/_http_writer.cp36-win_amd64.pyd,sha256=yxFOMEepS8vykxlCNgXRJL-PeaDQhuhdw7pU3yryUGI,44032
|
||||||
|
aiohttp/_http_writer.pyx,sha256=TzCawCBLMe7w9eX2SEcUcLYySwkFfrfjaEYHS0Uvjtg,4353
|
||||||
|
aiohttp/_websocket.c,sha256=JrG6bXW3OR8sfxl5V1Q3VTXvGBbFTYgzgdbhQHr3LGI,136606
|
||||||
|
aiohttp/_websocket.cp36-win_amd64.pyd,sha256=BoTx06YtxrQWEGuQrGXey7sg5ZU4Fy40TMUv7UIu8H0,29184
|
||||||
|
aiohttp/_websocket.pyx,sha256=Ig8jXl_wkAXPugEWS0oPYo0-BnL8zT7uBG6BrYqVXdA,1613
|
||||||
|
aiohttp/abc.py,sha256=s3wtDI3os8uX4FdQbsvJwr67cFGhylif0mR5k2SKY04,5600
|
||||||
|
aiohttp/base_protocol.py,sha256=5PJImwc0iX8kR3VjZn1D_SAeL-6JKERi87iGHEYjJQ4,2744
|
||||||
|
aiohttp/client.py,sha256=DYv-h8V2wljt4hRmPDmU2czk9zSlSn8zua9MgssSEiY,45130
|
||||||
|
aiohttp/client_exceptions.py,sha256=RCbzCGw_HcaqnL4AHf3nol32xH_2xu1hrYbLNgpjHqk,8786
|
||||||
|
aiohttp/client_proto.py,sha256=XDXJ0G9RW8m80wHahzjgp4T5S3Rf6LSYks9Q9MajSQg,8276
|
||||||
|
aiohttp/client_reqrep.py,sha256=zf6GFaDYvpy50HZ4GntrT8flcc6B4HfwnlHw_yYdGMw,37064
|
||||||
|
aiohttp/client_ws.py,sha256=OUkkw9RwRHRmAakBibE6c63VLMWGVgoyRadoC22wtNY,10995
|
||||||
|
aiohttp/connector.py,sha256=pbq2XHrujiyQXbIhzXQK6E1zrzRYedzt8xlGNmvbQcM,43672
|
||||||
|
aiohttp/cookiejar.py,sha256=lNwvnGX3BjIDU4btE50AUsBQditLXzJhsPPUMZo-dkI,12249
|
||||||
|
aiohttp/formdata.py,sha256=1yNFnS6O0wUrIL4_V66-DwyjS3nWVd0JiPIjWKbTZTs,5957
|
||||||
|
aiohttp/frozenlist.py,sha256=PSElO5biFCVHyVEr6-hXy7--cDaHAxaWDrsFxESHsFc,1853
|
||||||
|
aiohttp/frozenlist.pyi,sha256=z-EGiL4Q5MTe1wxDZINsIhqh4Eb0oT9Xn0X_Rt7C9ns,1512
|
||||||
|
aiohttp/hdrs.py,sha256=PmN2SUiMmwiC0TMEEMSFfwirUpnrzy3jwUhniPGFlmc,3549
|
||||||
|
aiohttp/helpers.py,sha256=yAdG1c-axo7-Vsf3CRaEqb7hU5Ej-FpUgZowGA76f_U,23613
|
||||||
|
aiohttp/http.py,sha256=H9xNqvagxteFvx2R7AeYiGfze7uR6VKF5IsUAITr7d4,2183
|
||||||
|
aiohttp/http_exceptions.py,sha256=Oby70EpyDmwpsb4DpCFYXw-sa856HmWv8IjeHlWWlJo,2771
|
||||||
|
aiohttp/http_parser.py,sha256=Ttk5BSX11cXMaFJmquzd1oNkZbnodghQvBgdUGdQxnE,28676
|
||||||
|
aiohttp/http_websocket.py,sha256=KmHznrwSjtpUgxbFafBg1MaAaCpxGxoK0IL8wDKg9f8,25400
|
||||||
|
aiohttp/http_writer.py,sha256=VBMPy_AaB7m_keycuu05SCN2S3GVVyY8UCHG-W86Y1w,5411
|
||||||
|
aiohttp/locks.py,sha256=6DiJHW1eQKXypu1eWXZT3_amPhFBK-jnxdI-_BpYICk,1278
|
||||||
|
aiohttp/log.py,sha256=qAQMjI6XpX3MOAZATN4HcG0tIceSreR54orlYZaoJ0A,333
|
||||||
|
aiohttp/multipart.py,sha256=RPXfp5GMauxW19nbBaLAkzgUFKTQ9eMo4XtZ7ItGyo4,33740
|
||||||
|
aiohttp/payload.py,sha256=lCF_pZvwyBKJGk4OOLYEQhtxUwOW8rsFF0pxisvfBps,14483
|
||||||
|
aiohttp/payload_streamer.py,sha256=7koj4FVujDGriDIOes48XPp5BK9tsWYyTxJG-3aNaHc,2177
|
||||||
|
aiohttp/py.typed,sha256=E84IaZyFwfLqvXjOVW4LS6WH7QOaKEFpNh9TFyzHNQc,6
|
||||||
|
aiohttp/pytest_plugin.py,sha256=1_XNSrZS-czuaNVt4qvRQs-GbIIl8DaLykGpoDlZfhU,11187
|
||||||
|
aiohttp/resolver.py,sha256=mQvusmMHpS0JekvnX7R1y4aqQ7BIIv3FIkxO5wgv2xQ,3738
|
||||||
|
aiohttp/signals.py,sha256=I_QAX1S7VbN7KDnNO6CSnAzhzx42AYh2Dto_FC9DQ3k,982
|
||||||
|
aiohttp/signals.pyi,sha256=pg4KElFcxBNFU-OQpTe2x-7qKJ79bAlemgqE-yaciiU,341
|
||||||
|
aiohttp/streams.py,sha256=EPM7T5_aJLOXlBTIEeFapIQ1O33KsHTvT-wWH3X0QvQ,21093
|
||||||
|
aiohttp/tcp_helpers.py,sha256=q9fHztjKbR57sCc4zWoo89QDW88pLT0OpcdHLGcV3Fo,1694
|
||||||
|
aiohttp/test_utils.py,sha256=_GjrPdE_9v0SxzbM4Tmt8vst-KJPwL2ILM_Rl1jHhi4,21530
|
||||||
|
aiohttp/tracing.py,sha256=GGhlQDrx5AVwFt33Zl4DvBIoFcR7sXAsgXNxvkd2Uus,13740
|
||||||
|
aiohttp/typedefs.py,sha256=o4R9uAySHxTzedIfX3UPbD0a5TnD5inc_M-h_4qyC4U,1377
|
||||||
|
aiohttp/web.py,sha256=KQXp0C__KpeX8nYM3FWl-eoMAmj9LZIbx7YeI39pQco,19940
|
||||||
|
aiohttp/web_app.py,sha256=dHOhoDoakwdrya0cc6Jl6K723MKGmd_M5LxH3wDeGQI,17779
|
||||||
|
aiohttp/web_exceptions.py,sha256=CQvslnHcpFnreO-qNjnKOWQev7ZvlTG6jfV14NQwb1Q,10519
|
||||||
|
aiohttp/web_fileresponse.py,sha256=TftBNfbgowCQ0L5Iud-dewCAnXq5tIyP-8iZ-KrSHw8,13118
|
||||||
|
aiohttp/web_log.py,sha256=gOR8iLbhjeAUwGL-21qD31kA0HlYSNhpdX6eNwJ-3Uo,8490
|
||||||
|
aiohttp/web_middlewares.py,sha256=jATe_igeeoyBoWKBDW_ISOOzFKvxSoLJE1QPTqZPWGc,4310
|
||||||
|
aiohttp/web_protocol.py,sha256=Zol5oVApIE12NDLBV_W1oKW8AN-sGdBfC0RFMI050U0,22791
|
||||||
|
aiohttp/web_request.py,sha256=xzvj84uGe5Uuug1b4iKWZl8uko_0TpzYKa00POke_NM,26526
|
||||||
|
aiohttp/web_response.py,sha256=CEx04E7NLNg6mfgTjT0QPS9vJuglbw3UQvwob6Qeb7c,26202
|
||||||
|
aiohttp/web_routedef.py,sha256=5QCl85zQml2qoj7bkC9XMoK4stBVuUoiq_0uefxifjc,6293
|
||||||
|
aiohttp/web_runner.py,sha256=ArW4NjMJ24Fv68Ez-9hPL1WNzVygDYEWJ4aIfzOMKz8,11479
|
||||||
|
aiohttp/web_server.py,sha256=P826xDCDs4VgeksMam8OHKm_VzprXuOpsJrysqj3CVg,2222
|
||||||
|
aiohttp/web_urldispatcher.py,sha256=8uhNNXlHd2WJfJ4wcyQ1UxoRM1VUyWWwQhK-TPrM_GM,40043
|
||||||
|
aiohttp/web_ws.py,sha256=mAU6Ln3AbMZeXjUZSSA5MmE39hTajJIMxBE0xnq-4Tc,17414
|
||||||
|
aiohttp/worker.py,sha256=yatPZxpUOp9CzDA05Jb2UWi0eo2PgGWlQm4lIFCRCSY,8420
|
@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.32.3)
+Generator: bdist_wheel (0.35.1)
 Root-Is-Purelib: false
 Tag: cp36-cp36m-win_amd64
@ -1,110 +1,118 @@
|
|||||||
__version__ = '3.5.4'
|
__version__ = '3.6.3'
|
||||||
|
|
||||||
from typing import Tuple # noqa
|
from typing import Tuple # noqa
|
||||||
|
|
||||||
from . import hdrs
|
from . import hdrs as hdrs
|
||||||
|
from .client import BaseConnector as BaseConnector
|
||||||
|
from .client import ClientConnectionError as ClientConnectionError
|
||||||
from .client import (
|
from .client import (
|
||||||
BaseConnector,
|
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
||||||
ClientConnectionError,
|
|
||||||
ClientConnectorCertificateError,
|
|
||||||
ClientConnectorError,
|
|
||||||
ClientConnectorSSLError,
|
|
||||||
ClientError,
|
|
||||||
ClientHttpProxyError,
|
|
||||||
ClientOSError,
|
|
||||||
ClientPayloadError,
|
|
||||||
ClientProxyConnectionError,
|
|
||||||
ClientResponse,
|
|
||||||
ClientRequest,
|
|
||||||
ClientResponseError,
|
|
||||||
ClientSSLError,
|
|
||||||
ClientSession,
|
|
||||||
ClientTimeout,
|
|
||||||
ClientWebSocketResponse,
|
|
||||||
ContentTypeError,
|
|
||||||
Fingerprint,
|
|
||||||
InvalidURL,
|
|
||||||
RequestInfo,
|
|
||||||
ServerConnectionError,
|
|
||||||
ServerDisconnectedError,
|
|
||||||
ServerFingerprintMismatch,
|
|
||||||
ServerTimeoutError,
|
|
||||||
TCPConnector,
|
|
||||||
UnixConnector,
|
|
||||||
WSServerHandshakeError,
|
|
||||||
request
|
|
||||||
)
|
)
|
||||||
|
from .client import ClientConnectorError as ClientConnectorError
|
||||||
from .cookiejar import CookieJar, DummyCookieJar
|
from .client import ClientConnectorSSLError as ClientConnectorSSLError
|
||||||
from .formdata import FormData
|
from .client import ClientError as ClientError
|
||||||
from .helpers import BasicAuth, ChainMapProxy
|
from .client import ClientHttpProxyError as ClientHttpProxyError
|
||||||
from .http import (
|
from .client import ClientOSError as ClientOSError
|
||||||
HttpVersion,
|
from .client import ClientPayloadError as ClientPayloadError
|
||||||
HttpVersion10,
|
from .client import ClientProxyConnectionError as ClientProxyConnectionError
|
||||||
HttpVersion11,
|
from .client import ClientRequest as ClientRequest
|
||||||
WSMsgType,
|
from .client import ClientResponse as ClientResponse
|
||||||
WSCloseCode,
|
from .client import ClientResponseError as ClientResponseError
|
||||||
WSMessage,
|
from .client import ClientSession as ClientSession
|
||||||
WebSocketError
|
from .client import ClientSSLError as ClientSSLError
|
||||||
)
|
from .client import ClientTimeout as ClientTimeout
|
||||||
|
from .client import ClientWebSocketResponse as ClientWebSocketResponse
|
||||||
|
from .client import ContentTypeError as ContentTypeError
|
||||||
|
from .client import Fingerprint as Fingerprint
|
||||||
|
from .client import InvalidURL as InvalidURL
|
||||||
|
from .client import NamedPipeConnector as NamedPipeConnector
|
||||||
|
from .client import RequestInfo as RequestInfo
|
||||||
|
from .client import ServerConnectionError as ServerConnectionError
|
||||||
|
from .client import ServerDisconnectedError as ServerDisconnectedError
|
||||||
|
from .client import ServerFingerprintMismatch as ServerFingerprintMismatch
|
||||||
|
from .client import ServerTimeoutError as ServerTimeoutError
|
||||||
|
from .client import TCPConnector as TCPConnector
|
||||||
|
from .client import TooManyRedirects as TooManyRedirects
|
||||||
|
from .client import UnixConnector as UnixConnector
|
||||||
|
from .client import WSServerHandshakeError as WSServerHandshakeError
|
||||||
|
from .client import request as request
|
||||||
|
from .cookiejar import CookieJar as CookieJar
|
||||||
|
from .cookiejar import DummyCookieJar as DummyCookieJar
|
||||||
|
from .formdata import FormData as FormData
|
||||||
|
from .helpers import BasicAuth as BasicAuth
|
||||||
|
from .helpers import ChainMapProxy as ChainMapProxy
|
||||||
|
from .http import HttpVersion as HttpVersion
|
||||||
|
from .http import HttpVersion10 as HttpVersion10
|
||||||
|
from .http import HttpVersion11 as HttpVersion11
|
||||||
|
from .http import WebSocketError as WebSocketError
|
||||||
|
from .http import WSCloseCode as WSCloseCode
|
||||||
|
from .http import WSMessage as WSMessage
|
||||||
|
from .http import WSMsgType as WSMsgType
|
||||||
from .multipart import (
|
from .multipart import (
|
||||||
BadContentDispositionHeader,
|
BadContentDispositionHeader as BadContentDispositionHeader,
|
||||||
BadContentDispositionParam,
|
|
||||||
BodyPartReader,
|
|
||||||
MultipartReader,
|
|
||||||
MultipartWriter,
|
|
||||||
content_disposition_filename,
|
|
||||||
parse_content_disposition
|
|
||||||
)
|
)
|
||||||
|
from .multipart import BadContentDispositionParam as BadContentDispositionParam
|
||||||
from .payload import (
|
from .multipart import BodyPartReader as BodyPartReader
|
||||||
AsyncIterablePayload,
|
from .multipart import MultipartReader as MultipartReader
|
||||||
BufferedReaderPayload,
|
from .multipart import MultipartWriter as MultipartWriter
|
||||||
BytesIOPayload,
|
from .multipart import (
|
||||||
BytesPayload,
|
content_disposition_filename as content_disposition_filename,
|
||||||
IOBasePayload,
|
|
||||||
JsonPayload,
|
|
||||||
PAYLOAD_REGISTRY,
|
|
||||||
Payload,
|
|
||||||
StringIOPayload,
|
|
||||||
StringPayload,
|
|
||||||
TextIOPayload,
|
|
||||||
get_payload,
|
|
||||||
payload_type
|
|
||||||
)
|
)
|
||||||
|
from .multipart import parse_content_disposition as parse_content_disposition
|
||||||
from .payload_streamer import streamer
|
from .payload import PAYLOAD_REGISTRY as PAYLOAD_REGISTRY
|
||||||
|
from .payload import AsyncIterablePayload as AsyncIterablePayload
|
||||||
from .resolver import AsyncResolver, DefaultResolver, ThreadedResolver
|
from .payload import BufferedReaderPayload as BufferedReaderPayload
|
||||||
|
from .payload import BytesIOPayload as BytesIOPayload
|
||||||
from .signals import Signal
|
from .payload import BytesPayload as BytesPayload
|
||||||
|
from .payload import IOBasePayload as IOBasePayload
|
||||||
from .streams import (
|
from .payload import JsonPayload as JsonPayload
|
||||||
DataQueue,
|
from .payload import Payload as Payload
|
||||||
EMPTY_PAYLOAD,
|
from .payload import StringIOPayload as StringIOPayload
|
||||||
EofStream,
|
from .payload import StringPayload as StringPayload
|
||||||
FlowControlDataQueue,
|
from .payload import TextIOPayload as TextIOPayload
|
||||||
StreamReader
|
from .payload import get_payload as get_payload
|
||||||
)
|
from .payload import payload_type as payload_type
|
||||||
|
from .payload_streamer import streamer as streamer
|
||||||
|
from .resolver import AsyncResolver as AsyncResolver
|
||||||
|
from .resolver import DefaultResolver as DefaultResolver
|
||||||
|
from .resolver import ThreadedResolver as ThreadedResolver
|
||||||
|
from .signals import Signal as Signal
|
||||||
|
from .streams import EMPTY_PAYLOAD as EMPTY_PAYLOAD
|
||||||
|
from .streams import DataQueue as DataQueue
|
||||||
|
from .streams import EofStream as EofStream
|
||||||
|
from .streams import FlowControlDataQueue as FlowControlDataQueue
|
||||||
|
from .streams import StreamReader as StreamReader
|
||||||
|
from .tracing import TraceConfig as TraceConfig
|
||||||
from .tracing import (
|
from .tracing import (
|
||||||
TraceConfig,
|
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
||||||
TraceConnectionCreateEndParams,
|
)
|
||||||
TraceConnectionCreateStartParams,
|
from .tracing import (
|
||||||
TraceConnectionQueuedEndParams,
|
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
||||||
TraceConnectionQueuedStartParams,
|
)
|
||||||
TraceConnectionReuseconnParams,
|
from .tracing import (
|
||||||
TraceDnsCacheHitParams,
|
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
||||||
TraceDnsCacheMissParams,
|
)
|
||||||
TraceDnsResolveHostEndParams,
|
from .tracing import (
|
||||||
TraceDnsResolveHostStartParams,
|
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
||||||
TraceRequestChunkSentParams,
|
)
|
||||||
TraceRequestEndParams,
|
from .tracing import (
|
||||||
TraceRequestExceptionParams,
|
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
||||||
TraceRequestRedirectParams,
|
)
|
||||||
TraceRequestStartParams,
|
from .tracing import TraceDnsCacheHitParams as TraceDnsCacheHitParams
|
||||||
TraceResponseChunkReceivedParams
|
from .tracing import TraceDnsCacheMissParams as TraceDnsCacheMissParams
|
||||||
|
from .tracing import (
|
||||||
|
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
||||||
|
)
|
||||||
|
from .tracing import (
|
||||||
|
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
||||||
|
)
|
||||||
|
from .tracing import TraceRequestChunkSentParams as TraceRequestChunkSentParams
|
||||||
|
from .tracing import TraceRequestEndParams as TraceRequestEndParams
|
||||||
|
from .tracing import TraceRequestExceptionParams as TraceRequestExceptionParams
|
||||||
|
from .tracing import TraceRequestRedirectParams as TraceRequestRedirectParams
|
||||||
|
from .tracing import TraceRequestStartParams as TraceRequestStartParams
|
||||||
|
from .tracing import (
|
||||||
|
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
||||||
)
|
)
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
@ -136,7 +144,9 @@ __all__ = (
     'ServerFingerprintMismatch',
     'ServerTimeoutError',
     'TCPConnector',
+    'TooManyRedirects',
     'UnixConnector',
+    'NamedPipeConnector',
     'WSServerHandshakeError',
     'request',
     # cookiejar
@ -1,16 +1,4 @@
|
|||||||
/* Generated by Cython 0.29.2 */
|
/* Generated by Cython 0.29.13 */
|
||||||
|
|
||||||
/* BEGIN: Cython Metadata
|
|
||||||
{
|
|
||||||
"distutils": {
|
|
||||||
"name": "aiohttp._frozenlist",
|
|
||||||
"sources": [
|
|
||||||
"aiohttp/_frozenlist.pyx"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"module_name": "aiohttp._frozenlist"
|
|
||||||
}
|
|
||||||
END: Cython Metadata */
|
|
||||||
|
|
||||||
#define PY_SSIZE_T_CLEAN
|
#define PY_SSIZE_T_CLEAN
|
||||||
#include "Python.h"
|
#include "Python.h"
|
||||||
@ -19,9 +7,9 @@ END: Cython Metadata */
|
|||||||
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
||||||
#error Cython requires Python 2.6+ or Python 3.3+.
|
#error Cython requires Python 2.6+ or Python 3.3+.
|
||||||
#else
|
#else
|
||||||
#define CYTHON_ABI "0_29_2"
|
#define CYTHON_ABI "0_29_13"
|
||||||
#define CYTHON_HEX_VERSION 0x001D02F0
|
#define CYTHON_HEX_VERSION 0x001D0DF0
|
||||||
#define CYTHON_FUTURE_DIVISION 0
|
#define CYTHON_FUTURE_DIVISION 1
|
||||||
#include <stddef.h>
|
#include <stddef.h>
|
||||||
#ifndef offsetof
|
#ifndef offsetof
|
||||||
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
||||||
@ -322,8 +310,13 @@ END: Cython Metadata */
|
|||||||
#define __Pyx_DefaultClassType PyClass_Type
|
#define __Pyx_DefaultClassType PyClass_Type
|
||||||
#else
|
#else
|
||||||
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
||||||
|
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
|
||||||
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
|
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#else
|
||||||
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#endif
|
||||||
#define __Pyx_DefaultClassType PyType_Type
|
#define __Pyx_DefaultClassType PyType_Type
|
||||||
#endif
|
#endif
|
||||||
#ifndef Py_TPFLAGS_CHECKTYPES
|
#ifndef Py_TPFLAGS_CHECKTYPES
|
||||||
@ -358,26 +351,6 @@ END: Cython Metadata */
|
|||||||
#else
|
#else
|
||||||
#define __Pyx_PyFastCFunction_Check(func) 0
|
#define __Pyx_PyFastCFunction_Check(func) 0
|
||||||
#endif
|
#endif
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
|
||||||
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
|
||||||
(cache_var) = (value);
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
|
||||||
static PY_UINT64_T __pyx_dict_version = 0;\
|
|
||||||
static PyObject *__pyx_dict_cached_value = NULL;\
|
|
||||||
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
|
||||||
(VAR) = __pyx_dict_cached_value;\
|
|
||||||
} else {\
|
|
||||||
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
|
||||||
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
|
||||||
}\
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (0)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
|
||||||
#endif
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
||||||
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
||||||
#define PyObject_Free(p) PyMem_Free(p)
|
#define PyObject_Free(p) PyMem_Free(p)
|
||||||
@ -410,7 +383,7 @@ END: Cython Metadata */
|
|||||||
typedef int Py_tss_t;
|
typedef int Py_tss_t;
|
||||||
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
||||||
*key = PyThread_create_key();
|
*key = PyThread_create_key();
|
||||||
return 0; // PyThread_create_key reports success always
|
return 0;
|
||||||
}
|
}
|
||||||
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
||||||
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
||||||
@ -433,7 +406,7 @@ static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
|
|||||||
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
||||||
return PyThread_get_key_value(*key);
|
return PyThread_get_key_value(*key);
|
||||||
}
|
}
|
||||||
#endif // TSS (Thread Specific Storage) API
|
#endif
|
||||||
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
||||||
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
||||||
#else
|
#else
|
||||||
@ -632,7 +605,8 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc
|
|||||||
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
||||||
|
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
|
||||||
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING ""
|
#define __PYX_DEFAULT_STRING_ENCODING ""
|
||||||
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
||||||
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
||||||
@ -1023,7 +997,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyObject_GetItem(PyObject *obj, PyObject* k
|
|||||||
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
||||||
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs);
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
|
||||||
#else
|
#else
|
||||||
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
||||||
#endif
|
#endif
|
||||||
@@ -1089,18 +1063,18 @@ static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t
|
|||||||
#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
||||||
(likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ?\
|
(likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ?\
|
||||||
__Pyx__PyList_PopIndex(L, py_ix, ix) : (\
|
__Pyx__PyList_PopIndex(L, py_ix, ix) : (\
|
||||||
(unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
(unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
||||||
__Pyx__PyObject_PopIndex(L, py_ix)))
|
__Pyx__PyObject_PopIndex(L, py_ix)))
|
||||||
#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
||||||
__Pyx_fits_Py_ssize_t(ix, type, is_signed) ?\
|
__Pyx_fits_Py_ssize_t(ix, type, is_signed) ?\
|
||||||
__Pyx__PyList_PopIndex(L, py_ix, ix) : (\
|
__Pyx__PyList_PopIndex(L, py_ix, ix) : (\
|
||||||
(unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
(unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
||||||
__Pyx__PyObject_PopIndex(L, py_ix)))
|
__Pyx__PyObject_PopIndex(L, py_ix)))
|
||||||
#else
|
#else
|
||||||
#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)\
|
#define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)\
|
||||||
__Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)
|
__Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)
|
||||||
#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
#define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\
|
||||||
(unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
(unlikely((py_ix) == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\
|
||||||
__Pyx__PyObject_PopIndex(L, py_ix))
|
__Pyx__PyObject_PopIndex(L, py_ix))
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@@ -1135,6 +1109,32 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *);
|
|||||||
/* GetAttr3.proto */
|
/* GetAttr3.proto */
|
||||||
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
|
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
|
||||||
|
|
||||||
|
/* PyDictVersioning.proto */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
||||||
|
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
||||||
|
(cache_var) = (value);
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
||||||
|
static PY_UINT64_T __pyx_dict_version = 0;\
|
||||||
|
static PyObject *__pyx_dict_cached_value = NULL;\
|
||||||
|
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
||||||
|
(VAR) = __pyx_dict_cached_value;\
|
||||||
|
} else {\
|
||||||
|
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
||||||
|
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
||||||
|
}\
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
|
||||||
|
#else
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (0)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
||||||
|
#endif
|
||||||
|
|
||||||
/* GetModuleGlobalName.proto */
|
/* GetModuleGlobalName.proto */
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
#if CYTHON_USE_DICT_VERSIONS
|
||||||
#define __Pyx_GetModuleGlobalName(var, name) {\
|
#define __Pyx_GetModuleGlobalName(var, name) {\
|
||||||
@@ -1298,9 +1298,9 @@ static const char __pyx_k_aiohttp__frozenlist[] = "aiohttp._frozenlist";
|
|||||||
static const char __pyx_k_pyx_unpickle_FrozenList[] = "__pyx_unpickle_FrozenList";
|
static const char __pyx_k_pyx_unpickle_FrozenList[] = "__pyx_unpickle_FrozenList";
|
||||||
static const char __pyx_k_Cannot_modify_frozen_list[] = "Cannot modify frozen list.";
|
static const char __pyx_k_Cannot_modify_frozen_list[] = "Cannot modify frozen list.";
|
||||||
static const char __pyx_k_Incompatible_checksums_s_vs_0x94[] = "Incompatible checksums (%s vs 0x949a143 = (_items, frozen))";
|
static const char __pyx_k_Incompatible_checksums_s_vs_0x94[] = "Incompatible checksums (%s vs 0x949a143 = (_items, frozen))";
|
||||||
static PyObject *__pyx_kp_s_Cannot_modify_frozen_list;
|
static PyObject *__pyx_kp_u_Cannot_modify_frozen_list;
|
||||||
static PyObject *__pyx_n_s_FrozenList;
|
static PyObject *__pyx_n_s_FrozenList;
|
||||||
static PyObject *__pyx_kp_s_FrozenList_frozen_r;
|
static PyObject *__pyx_kp_u_FrozenList_frozen_r;
|
||||||
static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x94;
|
static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x94;
|
||||||
static PyObject *__pyx_n_s_MutableSequence;
|
static PyObject *__pyx_n_s_MutableSequence;
|
||||||
static PyObject *__pyx_n_s_PickleError;
|
static PyObject *__pyx_n_s_PickleError;
|
||||||
@@ -3427,7 +3427,7 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct
|
|||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
__Pyx_XDECREF(__pyx_r);
|
__Pyx_XDECREF(__pyx_r);
|
||||||
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_FrozenList_frozen_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 104, __pyx_L1_error)
|
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_FrozenList_frozen_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 104, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_t_2);
|
__Pyx_GOTREF(__pyx_t_2);
|
||||||
__pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 104, __pyx_L1_error)
|
__pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 104, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_t_3);
|
__Pyx_GOTREF(__pyx_t_3);
|
||||||
@@ -3963,7 +3963,7 @@ static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHO
|
|||||||
__Pyx_INCREF(__pyx_n_s_PickleError);
|
__Pyx_INCREF(__pyx_n_s_PickleError);
|
||||||
__Pyx_GIVEREF(__pyx_n_s_PickleError);
|
__Pyx_GIVEREF(__pyx_n_s_PickleError);
|
||||||
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
|
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
|
||||||
__pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
|
__pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_t_3);
|
__Pyx_GOTREF(__pyx_t_3);
|
||||||
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
|
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
|
||||||
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
|
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
|
||||||
@@ -4472,6 +4472,9 @@ static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = {
|
|||||||
#if PY_VERSION_HEX >= 0x030400a1
|
#if PY_VERSION_HEX >= 0x030400a1
|
||||||
0, /*tp_finalize*/
|
0, /*tp_finalize*/
|
||||||
#endif
|
#endif
|
||||||
|
#if PY_VERSION_HEX >= 0x030800b1
|
||||||
|
0, /*tp_vectorcall*/
|
||||||
|
#endif
|
||||||
};
|
};
|
||||||
|
|
||||||
static PyMethodDef __pyx_methods[] = {
|
static PyMethodDef __pyx_methods[] = {
|
||||||
@@ -4520,9 +4523,9 @@ static struct PyModuleDef __pyx_moduledef = {
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
static __Pyx_StringTabEntry __pyx_string_tab[] = {
|
static __Pyx_StringTabEntry __pyx_string_tab[] = {
|
||||||
{&__pyx_kp_s_Cannot_modify_frozen_list, __pyx_k_Cannot_modify_frozen_list, sizeof(__pyx_k_Cannot_modify_frozen_list), 0, 0, 1, 0},
|
{&__pyx_kp_u_Cannot_modify_frozen_list, __pyx_k_Cannot_modify_frozen_list, sizeof(__pyx_k_Cannot_modify_frozen_list), 0, 1, 0, 0},
|
||||||
{&__pyx_n_s_FrozenList, __pyx_k_FrozenList, sizeof(__pyx_k_FrozenList), 0, 0, 1, 1},
|
{&__pyx_n_s_FrozenList, __pyx_k_FrozenList, sizeof(__pyx_k_FrozenList), 0, 0, 1, 1},
|
||||||
{&__pyx_kp_s_FrozenList_frozen_r, __pyx_k_FrozenList_frozen_r, sizeof(__pyx_k_FrozenList_frozen_r), 0, 0, 1, 0},
|
{&__pyx_kp_u_FrozenList_frozen_r, __pyx_k_FrozenList_frozen_r, sizeof(__pyx_k_FrozenList_frozen_r), 0, 1, 0, 0},
|
||||||
{&__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_k_Incompatible_checksums_s_vs_0x94, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x94), 0, 0, 1, 0},
|
{&__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_k_Incompatible_checksums_s_vs_0x94, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x94), 0, 0, 1, 0},
|
||||||
{&__pyx_n_s_MutableSequence, __pyx_k_MutableSequence, sizeof(__pyx_k_MutableSequence), 0, 0, 1, 1},
|
{&__pyx_n_s_MutableSequence, __pyx_k_MutableSequence, sizeof(__pyx_k_MutableSequence), 0, 0, 1, 1},
|
||||||
{&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1},
|
{&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1},
|
||||||
@@ -4584,7 +4587,7 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
|
|||||||
*
|
*
|
||||||
* cdef inline object _fast_len(self):
|
* cdef inline object _fast_len(self):
|
||||||
*/
|
*/
|
||||||
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_Cannot_modify_frozen_list); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 19, __pyx_L1_error)
|
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_Cannot_modify_frozen_list); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 19, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_tuple_);
|
__Pyx_GOTREF(__pyx_tuple_);
|
||||||
__Pyx_GIVEREF(__pyx_tuple_);
|
__Pyx_GIVEREF(__pyx_tuple_);
|
||||||
|
|
||||||
@@ -4659,7 +4662,9 @@ static int __Pyx_modinit_type_init_code(void) {
|
|||||||
__pyx_vtable_7aiohttp_11_frozenlist_FrozenList._check_frozen = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen;
|
__pyx_vtable_7aiohttp_11_frozenlist_FrozenList._check_frozen = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen;
|
||||||
__pyx_vtable_7aiohttp_11_frozenlist_FrozenList._fast_len = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len;
|
__pyx_vtable_7aiohttp_11_frozenlist_FrozenList._fast_len = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len;
|
||||||
if (PyType_Ready(&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error)
|
if (PyType_Ready(&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error)
|
||||||
|
#if PY_VERSION_HEX < 0x030800B1
|
||||||
__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_print = 0;
|
__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_print = 0;
|
||||||
|
#endif
|
||||||
if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dictoffset && __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro == PyObject_GenericGetAttr)) {
|
if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dictoffset && __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro == PyObject_GenericGetAttr)) {
|
||||||
__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro = __Pyx_PyObject_GenericGetAttr;
|
__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_getattro = __Pyx_PyObject_GenericGetAttr;
|
||||||
}
|
}
|
||||||
@@ -4859,10 +4864,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
Py_INCREF(__pyx_d);
|
Py_INCREF(__pyx_d);
|
||||||
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY
|
|
||||||
Py_INCREF(__pyx_b);
|
Py_INCREF(__pyx_b);
|
||||||
#endif
|
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
|
Py_INCREF(__pyx_cython_runtime);
|
||||||
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
||||||
/*--- Initialize various global constants etc. ---*/
|
/*--- Initialize various global constants etc. ---*/
|
||||||
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
@@ -4881,9 +4885,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
/*--- Builtin init code ---*/
|
/*--- Builtin init code ---*/
|
||||||
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
|
||||||
/*--- Constants init code ---*/
|
/*--- Constants init code ---*/
|
||||||
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
|
||||||
/*--- Global type/function init code ---*/
|
/*--- Global type/function init code ---*/
|
||||||
(void)__Pyx_modinit_global_init_code();
|
(void)__Pyx_modinit_global_init_code();
|
||||||
(void)__Pyx_modinit_variable_export_code();
|
(void)__Pyx_modinit_variable_export_code();
|
||||||
@@ -4907,7 +4911,7 @@ if (!__Pyx_RefNanny) {
|
|||||||
__Pyx_INCREF(__pyx_n_s_MutableSequence);
|
__Pyx_INCREF(__pyx_n_s_MutableSequence);
|
||||||
__Pyx_GIVEREF(__pyx_n_s_MutableSequence);
|
__Pyx_GIVEREF(__pyx_n_s_MutableSequence);
|
||||||
PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_MutableSequence);
|
PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_MutableSequence);
|
||||||
__pyx_t_2 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_t_2 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_t_2);
|
__Pyx_GOTREF(__pyx_t_2);
|
||||||
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
|
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
|
||||||
__pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
@@ -5514,7 +5518,7 @@ static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) {
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
|
||||||
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
||||||
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
||||||
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
||||||
@@ -5585,12 +5589,12 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
|
|||||||
}
|
}
|
||||||
#if PY_MAJOR_VERSION >= 3
|
#if PY_MAJOR_VERSION >= 3
|
||||||
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, kwdefs, closure);
|
d, (int)nd, kwdefs, closure);
|
||||||
#else
|
#else
|
||||||
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, closure);
|
d, (int)nd, closure);
|
||||||
#endif
|
#endif
|
||||||
@@ -6005,6 +6009,32 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject
|
|||||||
return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
|
return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* PyDictVersioning */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
|
||||||
|
PyObject **dictptr = NULL;
|
||||||
|
Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
|
||||||
|
if (offset) {
|
||||||
|
#if CYTHON_COMPILING_IN_CPYTHON
|
||||||
|
dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
|
||||||
|
#else
|
||||||
|
dictptr = _PyObject_GetDictPtr(obj);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
|
||||||
|
return 0;
|
||||||
|
return obj_dict_version == __Pyx_get_object_dict_version(obj);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
/* GetModuleGlobalName */
|
/* GetModuleGlobalName */
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
#if CYTHON_USE_DICT_VERSIONS
|
||||||
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
|
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
|
||||||
|
Binary file not shown.
@@ -1,16 +1,4 @@
|
|||||||
/* Generated by Cython 0.29.2 */
|
/* Generated by Cython 0.29.13 */
|
||||||
|
|
||||||
/* BEGIN: Cython Metadata
|
|
||||||
{
|
|
||||||
"distutils": {
|
|
||||||
"name": "aiohttp._helpers",
|
|
||||||
"sources": [
|
|
||||||
"aiohttp/_helpers.pyx"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"module_name": "aiohttp._helpers"
|
|
||||||
}
|
|
||||||
END: Cython Metadata */
|
|
||||||
|
|
||||||
#define PY_SSIZE_T_CLEAN
|
#define PY_SSIZE_T_CLEAN
|
||||||
#include "Python.h"
|
#include "Python.h"
|
||||||
@@ -19,9 +7,9 @@ END: Cython Metadata */
|
|||||||
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
||||||
#error Cython requires Python 2.6+ or Python 3.3+.
|
#error Cython requires Python 2.6+ or Python 3.3+.
|
||||||
#else
|
#else
|
||||||
#define CYTHON_ABI "0_29_2"
|
#define CYTHON_ABI "0_29_13"
|
||||||
#define CYTHON_HEX_VERSION 0x001D02F0
|
#define CYTHON_HEX_VERSION 0x001D0DF0
|
||||||
#define CYTHON_FUTURE_DIVISION 0
|
#define CYTHON_FUTURE_DIVISION 1
|
||||||
#include <stddef.h>
|
#include <stddef.h>
|
||||||
#ifndef offsetof
|
#ifndef offsetof
|
||||||
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
||||||
@@ -322,8 +310,13 @@ END: Cython Metadata */
|
|||||||
#define __Pyx_DefaultClassType PyClass_Type
|
#define __Pyx_DefaultClassType PyClass_Type
|
||||||
#else
|
#else
|
||||||
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
||||||
|
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
|
||||||
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
|
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#else
|
||||||
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#endif
|
||||||
#define __Pyx_DefaultClassType PyType_Type
|
#define __Pyx_DefaultClassType PyType_Type
|
||||||
#endif
|
#endif
|
||||||
#ifndef Py_TPFLAGS_CHECKTYPES
|
#ifndef Py_TPFLAGS_CHECKTYPES
|
||||||
@@ -358,26 +351,6 @@ END: Cython Metadata */
|
|||||||
#else
|
#else
|
||||||
#define __Pyx_PyFastCFunction_Check(func) 0
|
#define __Pyx_PyFastCFunction_Check(func) 0
|
||||||
#endif
|
#endif
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
|
||||||
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
|
||||||
(cache_var) = (value);
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
|
||||||
static PY_UINT64_T __pyx_dict_version = 0;\
|
|
||||||
static PyObject *__pyx_dict_cached_value = NULL;\
|
|
||||||
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
|
||||||
(VAR) = __pyx_dict_cached_value;\
|
|
||||||
} else {\
|
|
||||||
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
|
||||||
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
|
||||||
}\
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (0)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
|
||||||
#endif
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
||||||
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
||||||
#define PyObject_Free(p) PyMem_Free(p)
|
#define PyObject_Free(p) PyMem_Free(p)
|
||||||
@@ -410,7 +383,7 @@ END: Cython Metadata */
|
|||||||
typedef int Py_tss_t;
|
typedef int Py_tss_t;
|
||||||
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
||||||
*key = PyThread_create_key();
|
*key = PyThread_create_key();
|
||||||
return 0; // PyThread_create_key reports success always
|
return 0;
|
||||||
}
|
}
|
||||||
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
||||||
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
||||||
@@ -433,7 +406,7 @@ static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
|
|||||||
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
||||||
return PyThread_get_key_value(*key);
|
return PyThread_get_key_value(*key);
|
||||||
}
|
}
|
||||||
#endif // TSS (Thread Specific Storage) API
|
#endif
|
||||||
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
||||||
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
||||||
#else
|
#else
|
||||||
@@ -632,7 +605,8 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc
|
|||||||
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
||||||
|
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
|
||||||
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING ""
|
#define __PYX_DEFAULT_STRING_ENCODING ""
|
||||||
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
||||||
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
||||||
@@ -1016,7 +990,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObje
|
|||||||
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
||||||
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs);
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
|
||||||
#else
|
#else
|
||||||
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
||||||
#endif
|
#endif
|
||||||
@@ -1086,6 +1060,32 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *);
|
|||||||
/* GetAttr3.proto */
|
/* GetAttr3.proto */
|
||||||
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
|
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *);
|
||||||
|
|
||||||
|
/* PyDictVersioning.proto */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
||||||
|
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
||||||
|
(cache_var) = (value);
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
||||||
|
static PY_UINT64_T __pyx_dict_version = 0;\
|
||||||
|
static PyObject *__pyx_dict_cached_value = NULL;\
|
||||||
|
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
||||||
|
(VAR) = __pyx_dict_cached_value;\
|
||||||
|
} else {\
|
||||||
|
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
||||||
|
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
||||||
|
}\
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
|
||||||
|
#else
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (0)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
||||||
|
#endif
|
||||||
|
|
||||||
/* GetModuleGlobalName.proto */
|
/* GetModuleGlobalName.proto */
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
#if CYTHON_USE_DICT_VERSIONS
|
||||||
#define __Pyx_GetModuleGlobalName(var, name) {\
|
#define __Pyx_GetModuleGlobalName(var, name) {\
|
||||||
@@ -1254,7 +1254,7 @@ static PyObject *__pyx_n_s_pyx_unpickle_reify;
|
|||||||
static PyObject *__pyx_n_s_reduce;
|
static PyObject *__pyx_n_s_reduce;
|
||||||
static PyObject *__pyx_n_s_reduce_cython;
|
static PyObject *__pyx_n_s_reduce_cython;
|
||||||
static PyObject *__pyx_n_s_reduce_ex;
|
static PyObject *__pyx_n_s_reduce_ex;
|
||||||
static PyObject *__pyx_kp_s_reified_property_is_read_only;
|
static PyObject *__pyx_kp_u_reified_property_is_read_only;
|
||||||
static PyObject *__pyx_n_s_reify;
|
static PyObject *__pyx_n_s_reify;
|
||||||
static PyObject *__pyx_n_s_setstate;
|
static PyObject *__pyx_n_s_setstate;
|
||||||
static PyObject *__pyx_n_s_setstate_cython;
|
static PyObject *__pyx_n_s_setstate_cython;
|
||||||
@@ -1657,12 +1657,12 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers_5reify_2__get__(struct __pyx_obj_7a
|
|||||||
*/
|
*/
|
||||||
}
|
}
|
||||||
__pyx_L3_error:;
|
__pyx_L3_error:;
|
||||||
__Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0;
|
|
||||||
__Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0;
|
|
||||||
__Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0;
|
|
||||||
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
|
|
||||||
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
|
|
||||||
__Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;
|
__Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;
|
||||||
|
__Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0;
|
||||||
|
__Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0;
|
||||||
|
__Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0;
|
||||||
|
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
|
||||||
|
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
|
||||||
|
|
||||||
/* "aiohttp/_helpers.pyx":29
|
/* "aiohttp/_helpers.pyx":29
|
||||||
* inst._cache[self.name] = val
|
* inst._cache[self.name] = val
|
||||||
@@ -2250,7 +2250,7 @@ static PyObject *__pyx_pf_7aiohttp_8_helpers___pyx_unpickle_reify(CYTHON_UNUSED
|
|||||||
__Pyx_INCREF(__pyx_n_s_PickleError);
|
__Pyx_INCREF(__pyx_n_s_PickleError);
|
||||||
__Pyx_GIVEREF(__pyx_n_s_PickleError);
|
__Pyx_GIVEREF(__pyx_n_s_PickleError);
|
||||||
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
|
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError);
|
||||||
__pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
|
__pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_t_3);
|
__Pyx_GOTREF(__pyx_t_3);
|
||||||
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
|
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
|
||||||
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
|
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error)
|
||||||
@@ -2676,6 +2676,9 @@ static PyTypeObject __pyx_type_7aiohttp_8_helpers_reify = {
|
|||||||
#if PY_VERSION_HEX >= 0x030400a1
|
#if PY_VERSION_HEX >= 0x030400a1
|
||||||
0, /*tp_finalize*/
|
0, /*tp_finalize*/
|
||||||
#endif
|
#endif
|
||||||
|
#if PY_VERSION_HEX >= 0x030800b1
|
||||||
|
0, /*tp_vectorcall*/
|
||||||
|
#endif
|
||||||
};
|
};
|
||||||
|
|
||||||
static PyMethodDef __pyx_methods[] = {
|
static PyMethodDef __pyx_methods[] = {
|
||||||
@@ -2748,7 +2751,7 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
|
|||||||
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
|
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
|
||||||
{&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
|
{&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
|
||||||
{&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
|
{&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
|
||||||
{&__pyx_kp_s_reified_property_is_read_only, __pyx_k_reified_property_is_read_only, sizeof(__pyx_k_reified_property_is_read_only), 0, 0, 1, 0},
|
{&__pyx_kp_u_reified_property_is_read_only, __pyx_k_reified_property_is_read_only, sizeof(__pyx_k_reified_property_is_read_only), 0, 1, 0, 0},
|
||||||
{&__pyx_n_s_reify, __pyx_k_reify, sizeof(__pyx_k_reify), 0, 0, 1, 1},
|
{&__pyx_n_s_reify, __pyx_k_reify, sizeof(__pyx_k_reify), 0, 0, 1, 1},
|
||||||
{&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
|
{&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
|
||||||
{&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
|
{&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
|
||||||
@@ -2775,7 +2778,7 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
|
|||||||
* def __set__(self, inst, value):
|
* def __set__(self, inst, value):
|
||||||
* raise AttributeError("reified property is read-only") # <<<<<<<<<<<<<<
|
* raise AttributeError("reified property is read-only") # <<<<<<<<<<<<<<
|
||||||
*/
|
*/
|
||||||
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_reified_property_is_read_only); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 35, __pyx_L1_error)
|
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_u_reified_property_is_read_only); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 35, __pyx_L1_error)
|
||||||
__Pyx_GOTREF(__pyx_tuple_);
|
__Pyx_GOTREF(__pyx_tuple_);
|
||||||
__Pyx_GIVEREF(__pyx_tuple_);
|
__Pyx_GIVEREF(__pyx_tuple_);
|
||||||
|
|
||||||
@@ -2840,7 +2843,9 @@ static int __Pyx_modinit_type_init_code(void) {
|
|||||||
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
|
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
|
||||||
/*--- Type init code ---*/
|
/*--- Type init code ---*/
|
||||||
if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (PyType_Ready(&__pyx_type_7aiohttp_8_helpers_reify) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
|
#if PY_VERSION_HEX < 0x030800B1
|
||||||
__pyx_type_7aiohttp_8_helpers_reify.tp_print = 0;
|
__pyx_type_7aiohttp_8_helpers_reify.tp_print = 0;
|
||||||
|
#endif
|
||||||
if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_8_helpers_reify.tp_dictoffset && __pyx_type_7aiohttp_8_helpers_reify.tp_getattro == PyObject_GenericGetAttr)) {
|
if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_7aiohttp_8_helpers_reify.tp_dictoffset && __pyx_type_7aiohttp_8_helpers_reify.tp_getattro == PyObject_GenericGetAttr)) {
|
||||||
__pyx_type_7aiohttp_8_helpers_reify.tp_getattro = __Pyx_PyObject_GenericGetAttr;
|
__pyx_type_7aiohttp_8_helpers_reify.tp_getattro = __Pyx_PyObject_GenericGetAttr;
|
||||||
}
|
}
|
||||||
@@ -3038,10 +3043,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
Py_INCREF(__pyx_d);
|
Py_INCREF(__pyx_d);
|
||||||
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY
|
|
||||||
Py_INCREF(__pyx_b);
|
Py_INCREF(__pyx_b);
|
||||||
#endif
|
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
|
Py_INCREF(__pyx_cython_runtime);
|
||||||
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
||||||
/*--- Initialize various global constants etc. ---*/
|
/*--- Initialize various global constants etc. ---*/
|
||||||
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
@@ -3060,9 +3064,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
/*--- Builtin init code ---*/
|
/*--- Builtin init code ---*/
|
||||||
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
|
||||||
/*--- Constants init code ---*/
|
/*--- Constants init code ---*/
|
||||||
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
|
||||||
/*--- Global type/function init code ---*/
|
/*--- Global type/function init code ---*/
|
||||||
(void)__Pyx_modinit_global_init_code();
|
(void)__Pyx_modinit_global_init_code();
|
||||||
(void)__Pyx_modinit_variable_export_code();
|
(void)__Pyx_modinit_variable_export_code();
|
||||||
@@ -3633,7 +3637,7 @@ static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) {
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
|
||||||
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
||||||
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
||||||
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
||||||
@@ -3704,12 +3708,12 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
|
|||||||
}
|
}
|
||||||
#if PY_MAJOR_VERSION >= 3
|
#if PY_MAJOR_VERSION >= 3
|
||||||
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, kwdefs, closure);
|
d, (int)nd, kwdefs, closure);
|
||||||
#else
|
#else
|
||||||
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, closure);
|
d, (int)nd, closure);
|
||||||
#endif
|
#endif
|
||||||
@@ -4041,6 +4045,32 @@ static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject
|
|||||||
return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
|
return (likely(r)) ? r : __Pyx_GetAttr3Default(d);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* PyDictVersioning */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
|
||||||
|
PyObject **dictptr = NULL;
|
||||||
|
Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
|
||||||
|
if (offset) {
|
||||||
|
#if CYTHON_COMPILING_IN_CPYTHON
|
||||||
|
dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
|
||||||
|
#else
|
||||||
|
dictptr = _PyObject_GetDictPtr(obj);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
|
||||||
|
return 0;
|
||||||
|
return obj_dict_version == __Pyx_get_object_dict_version(obj);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
/* GetModuleGlobalName */
|
/* GetModuleGlobalName */
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
#if CYTHON_USE_DICT_VERSIONS
|
||||||
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
|
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
|
||||||
|
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
@@ -576,7 +576,8 @@ cdef class HttpResponseParser(HttpParser):
|
|||||||
if self._buf:
|
if self._buf:
|
||||||
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
||||||
PyByteArray_Resize(self._buf, 0)
|
PyByteArray_Resize(self._buf, 0)
|
||||||
|
else:
|
||||||
|
self._reason = self._reason or ''
|
||||||
|
|
||||||
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
|
cdef int cb_on_message_begin(cparser.http_parser* parser) except -1:
|
||||||
cdef HttpParser pyparser = <HttpParser>parser.data
|
cdef HttpParser pyparser = <HttpParser>parser.data
|
||||||
|
File diff suppressed because it is too large
Binary file not shown.
@@ -123,28 +123,28 @@ def _serialize_headers(str status_line, headers):
|
|||||||
try:
|
try:
|
||||||
if _write_str(&writer, status_line) < 0:
|
if _write_str(&writer, status_line) < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, '\r') < 0:
|
if _write_byte(&writer, b'\r') < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, '\n') < 0:
|
if _write_byte(&writer, b'\n') < 0:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
for key, val in headers.items():
|
for key, val in headers.items():
|
||||||
if _write_str(&writer, to_str(key)) < 0:
|
if _write_str(&writer, to_str(key)) < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, ':') < 0:
|
if _write_byte(&writer, b':') < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, ' ') < 0:
|
if _write_byte(&writer, b' ') < 0:
|
||||||
raise
|
raise
|
||||||
if _write_str(&writer, to_str(val)) < 0:
|
if _write_str(&writer, to_str(val)) < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, '\r') < 0:
|
if _write_byte(&writer, b'\r') < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, '\n') < 0:
|
if _write_byte(&writer, b'\n') < 0:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
if _write_byte(&writer, '\r') < 0:
|
if _write_byte(&writer, b'\r') < 0:
|
||||||
raise
|
raise
|
||||||
if _write_byte(&writer, '\n') < 0:
|
if _write_byte(&writer, b'\n') < 0:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
|
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
|
||||||
|
@@ -1,17 +1,4 @@
|
|||||||
/* Generated by Cython 0.29.2 */
|
/* Generated by Cython 0.29.13 */
|
||||||
|
|
||||||
/* BEGIN: Cython Metadata
|
|
||||||
{
|
|
||||||
"distutils": {
|
|
||||||
"depends": [],
|
|
||||||
"name": "aiohttp._websocket",
|
|
||||||
"sources": [
|
|
||||||
"aiohttp/_websocket.pyx"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"module_name": "aiohttp._websocket"
|
|
||||||
}
|
|
||||||
END: Cython Metadata */
|
|
||||||
|
|
||||||
#define PY_SSIZE_T_CLEAN
|
#define PY_SSIZE_T_CLEAN
|
||||||
#include "Python.h"
|
#include "Python.h"
|
||||||
@@ -20,9 +7,9 @@ END: Cython Metadata */
|
|||||||
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
|
||||||
#error Cython requires Python 2.6+ or Python 3.3+.
|
#error Cython requires Python 2.6+ or Python 3.3+.
|
||||||
#else
|
#else
|
||||||
#define CYTHON_ABI "0_29_2"
|
#define CYTHON_ABI "0_29_13"
|
||||||
#define CYTHON_HEX_VERSION 0x001D02F0
|
#define CYTHON_HEX_VERSION 0x001D0DF0
|
||||||
#define CYTHON_FUTURE_DIVISION 0
|
#define CYTHON_FUTURE_DIVISION 1
|
||||||
#include <stddef.h>
|
#include <stddef.h>
|
||||||
#ifndef offsetof
|
#ifndef offsetof
|
||||||
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
|
||||||
@@ -323,8 +310,13 @@ END: Cython Metadata */
|
|||||||
#define __Pyx_DefaultClassType PyClass_Type
|
#define __Pyx_DefaultClassType PyClass_Type
|
||||||
#else
|
#else
|
||||||
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
|
||||||
|
#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
|
||||||
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
|
PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#else
|
||||||
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
|
||||||
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
|
||||||
|
#endif
|
||||||
#define __Pyx_DefaultClassType PyType_Type
|
#define __Pyx_DefaultClassType PyType_Type
|
||||||
#endif
|
#endif
|
||||||
#ifndef Py_TPFLAGS_CHECKTYPES
|
#ifndef Py_TPFLAGS_CHECKTYPES
|
||||||
@@ -359,26 +351,6 @@ END: Cython Metadata */
|
|||||||
#else
|
#else
|
||||||
#define __Pyx_PyFastCFunction_Check(func) 0
|
#define __Pyx_PyFastCFunction_Check(func) 0
|
||||||
#endif
|
#endif
|
||||||
#if CYTHON_USE_DICT_VERSIONS
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
|
||||||
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
|
||||||
(cache_var) = (value);
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
|
||||||
static PY_UINT64_T __pyx_dict_version = 0;\
|
|
||||||
static PyObject *__pyx_dict_cached_value = NULL;\
|
|
||||||
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
|
||||||
(VAR) = __pyx_dict_cached_value;\
|
|
||||||
} else {\
|
|
||||||
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
|
||||||
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
|
||||||
}\
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
#define __PYX_GET_DICT_VERSION(dict) (0)
|
|
||||||
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
|
||||||
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
|
||||||
#endif
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
|
||||||
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
#define PyObject_Malloc(s) PyMem_Malloc(s)
|
||||||
#define PyObject_Free(p) PyMem_Free(p)
|
#define PyObject_Free(p) PyMem_Free(p)
|
||||||
@@ -411,7 +383,7 @@ END: Cython Metadata */
|
|||||||
typedef int Py_tss_t;
|
typedef int Py_tss_t;
|
||||||
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
|
||||||
*key = PyThread_create_key();
|
*key = PyThread_create_key();
|
||||||
return 0; // PyThread_create_key reports success always
|
return 0;
|
||||||
}
|
}
|
||||||
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
|
||||||
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
|
||||||
@@ -434,7 +406,7 @@ static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
|
|||||||
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
|
||||||
return PyThread_get_key_value(*key);
|
return PyThread_get_key_value(*key);
|
||||||
}
|
}
|
||||||
#endif // TSS (Thread Specific Storage) API
|
#endif
|
||||||
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
|
||||||
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
|
||||||
#else
|
#else
|
||||||
@@ -637,7 +609,8 @@ typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* enc
|
|||||||
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
|
||||||
|
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
|
||||||
|
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
|
||||||
#define __PYX_DEFAULT_STRING_ENCODING ""
|
#define __PYX_DEFAULT_STRING_ENCODING ""
|
||||||
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
|
||||||
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
|
||||||
@@ -938,7 +911,7 @@ static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObje
|
|||||||
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
|
||||||
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs);
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
|
||||||
#else
|
#else
|
||||||
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
|
||||||
#endif
|
#endif
|
||||||
@@ -982,6 +955,32 @@ enum __Pyx_ImportType_CheckSize {
|
|||||||
static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
|
static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
/* PyDictVersioning.proto */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
|
||||||
|
(version_var) = __PYX_GET_DICT_VERSION(dict);\
|
||||||
|
(cache_var) = (value);
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
|
||||||
|
static PY_UINT64_T __pyx_dict_version = 0;\
|
||||||
|
static PyObject *__pyx_dict_cached_value = NULL;\
|
||||||
|
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
|
||||||
|
(VAR) = __pyx_dict_cached_value;\
|
||||||
|
} else {\
|
||||||
|
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
|
||||||
|
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
|
||||||
|
}\
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
|
||||||
|
#else
|
||||||
|
#define __PYX_GET_DICT_VERSION(dict) (0)
|
||||||
|
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
|
||||||
|
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
|
||||||
|
#endif
|
||||||
|
|
||||||
/* PyThreadStateGet.proto */
|
/* PyThreadStateGet.proto */
|
||||||
#if CYTHON_FAST_THREAD_STATE
|
#if CYTHON_FAST_THREAD_STATE
|
||||||
#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
|
#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
|
||||||
@@ -1946,10 +1945,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
Py_INCREF(__pyx_d);
|
Py_INCREF(__pyx_d);
|
||||||
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
|
||||||
#if CYTHON_COMPILING_IN_PYPY
|
|
||||||
Py_INCREF(__pyx_b);
|
Py_INCREF(__pyx_b);
|
||||||
#endif
|
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
|
Py_INCREF(__pyx_cython_runtime);
|
||||||
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
|
||||||
/*--- Initialize various global constants etc. ---*/
|
/*--- Initialize various global constants etc. ---*/
|
||||||
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
||||||
@@ -1968,9 +1966,9 @@ if (!__Pyx_RefNanny) {
|
|||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
/*--- Builtin init code ---*/
|
/*--- Builtin init code ---*/
|
||||||
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;
|
||||||
/*--- Constants init code ---*/
|
/*--- Constants init code ---*/
|
||||||
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
|
if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;
|
||||||
/*--- Global type/function init code ---*/
|
/*--- Global type/function init code ---*/
|
||||||
(void)__Pyx_modinit_global_init_code();
|
(void)__Pyx_modinit_global_init_code();
|
||||||
(void)__Pyx_modinit_variable_export_code();
|
(void)__Pyx_modinit_variable_export_code();
|
||||||
@@ -2272,7 +2270,7 @@ static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
#if 1 || PY_VERSION_HEX < 0x030600B1
|
#if 1 || PY_VERSION_HEX < 0x030600B1
|
||||||
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) {
|
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
|
||||||
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
|
||||||
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
PyObject *globals = PyFunction_GET_GLOBALS(func);
|
||||||
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
|
||||||
@@ -2343,12 +2341,12 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args,
|
|||||||
}
|
}
|
||||||
#if PY_MAJOR_VERSION >= 3
|
#if PY_MAJOR_VERSION >= 3
|
||||||
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, kwdefs, closure);
|
d, (int)nd, kwdefs, closure);
|
||||||
#else
|
#else
|
||||||
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
|
||||||
args, nargs,
|
args, (int)nargs,
|
||||||
k, (int)nk,
|
k, (int)nk,
|
||||||
d, (int)nd, closure);
|
d, (int)nd, closure);
|
||||||
#endif
|
#endif
|
||||||
@ -2501,6 +2499,32 @@ bad:
|
|||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
/* PyDictVersioning */
|
||||||
|
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
|
||||||
|
PyObject **dictptr = NULL;
|
||||||
|
Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
|
||||||
|
if (offset) {
|
||||||
|
#if CYTHON_COMPILING_IN_CPYTHON
|
||||||
|
dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
|
||||||
|
#else
|
||||||
|
dictptr = _PyObject_GetDictPtr(obj);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
|
||||||
|
}
|
||||||
|
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
|
||||||
|
PyObject *dict = Py_TYPE(obj)->tp_dict;
|
||||||
|
if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
|
||||||
|
return 0;
|
||||||
|
return obj_dict_version == __Pyx_get_object_dict_version(obj);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
/* PyErrFetchRestore */
|
/* PyErrFetchRestore */
|
||||||
#if CYTHON_FAST_THREAD_STATE
|
#if CYTHON_FAST_THREAD_STATE
|
||||||
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
|
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
|
||||||
|
Binary file not shown.
@@ -6,7 +6,7 @@ from .tcp_helpers import tcp_nodelay

 class BaseProtocol(asyncio.Protocol):
 __slots__ = ('_loop', '_paused', '_drain_waiter',
-'_connection_lost', 'transport')
+'_connection_lost', '_reading_paused', 'transport')

 def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
 self._loop = loop # type: asyncio.AbstractEventLoop
@@ -31,36 +31,45 @@ from yarl import URL

 from . import hdrs, http, payload
 from .abc import AbstractCookieJar
+from .client_exceptions import ClientConnectionError as ClientConnectionError
 from .client_exceptions import (
-ClientConnectionError,
+ClientConnectorCertificateError as ClientConnectorCertificateError,
-ClientConnectorCertificateError,
-ClientConnectorError,
-ClientConnectorSSLError,
-ClientError,
-ClientHttpProxyError,
-ClientOSError,
-ClientPayloadError,
-ClientProxyConnectionError,
-ClientResponseError,
-ClientSSLError,
-ContentTypeError,
-InvalidURL,
-ServerConnectionError,
-ServerDisconnectedError,
-ServerFingerprintMismatch,
-ServerTimeoutError,
-TooManyRedirects,
-WSServerHandshakeError,
 )
-from .client_reqrep import (
+from .client_exceptions import ClientConnectorError as ClientConnectorError
-ClientRequest,
+from .client_exceptions import (
-ClientResponse,
+ClientConnectorSSLError as ClientConnectorSSLError,
-Fingerprint,
-RequestInfo,
-_merge_ssl_params,
 )
-from .client_ws import ClientWebSocketResponse
+from .client_exceptions import ClientError as ClientError
-from .connector import BaseConnector, TCPConnector, UnixConnector
+from .client_exceptions import ClientHttpProxyError as ClientHttpProxyError
+from .client_exceptions import ClientOSError as ClientOSError
+from .client_exceptions import ClientPayloadError as ClientPayloadError
+from .client_exceptions import (
+ClientProxyConnectionError as ClientProxyConnectionError,
+)
+from .client_exceptions import ClientResponseError as ClientResponseError
+from .client_exceptions import ClientSSLError as ClientSSLError
+from .client_exceptions import ContentTypeError as ContentTypeError
+from .client_exceptions import InvalidURL as InvalidURL
+from .client_exceptions import ServerConnectionError as ServerConnectionError
+from .client_exceptions import (
+ServerDisconnectedError as ServerDisconnectedError,
+)
+from .client_exceptions import (
+ServerFingerprintMismatch as ServerFingerprintMismatch,
+)
+from .client_exceptions import ServerTimeoutError as ServerTimeoutError
+from .client_exceptions import TooManyRedirects as TooManyRedirects
+from .client_exceptions import WSServerHandshakeError as WSServerHandshakeError
+from .client_reqrep import ClientRequest as ClientRequest
+from .client_reqrep import ClientResponse as ClientResponse
+from .client_reqrep import Fingerprint as Fingerprint
+from .client_reqrep import RequestInfo as RequestInfo
+from .client_reqrep import _merge_ssl_params
+from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
+from .connector import BaseConnector as BaseConnector
+from .connector import NamedPipeConnector as NamedPipeConnector
+from .connector import TCPConnector as TCPConnector
+from .connector import UnixConnector as UnixConnector
 from .cookiejar import CookieJar
 from .helpers import (
 DEBUG,
@@ -114,6 +123,7 @@ __all__ = (
 'BaseConnector',
 'TCPConnector',
 'UnixConnector',
+'NamedPipeConnector',
 # client_ws
 'ClientWebSocketResponse',
 # client
@@ -170,7 +180,6 @@ class ClientSession:
 '_ws_response_class', '_trace_configs'])

 _source_traceback = None
-_connector = None

 def __init__(self, *, connector: Optional[BaseConnector]=None,
 loop: Optional[asyncio.AbstractEventLoop]=None,
@@ -219,7 +228,7 @@ class ClientSession:
 if cookies is not None:
 self._cookie_jar.update_cookies(cookies)

-self._connector = connector # type: BaseConnector
+self._connector = connector # type: Optional[BaseConnector]
 self._connector_owner = connector_owner
 self._default_auth = auth
 self._version = version
@@ -256,10 +265,10 @@ class ClientSession:

 # Convert to list of tuples
 if headers:
-headers = CIMultiDict(headers)
+real_headers = CIMultiDict(headers) # type: CIMultiDict[str]
 else:
-headers = CIMultiDict()
+real_headers = CIMultiDict()
-self._default_headers = headers
+self._default_headers = real_headers # type: CIMultiDict[str]
 if skip_auto_headers is not None:
 self._skip_auto_headers = frozenset([istr(i)
 for i in skip_auto_headers])
@@ -434,16 +443,14 @@ class ClientSession:
 "with AUTH argument or credentials "
 "encoded in URL")

-session_cookies = self._cookie_jar.filter_cookies(url)
+all_cookies = self._cookie_jar.filter_cookies(url)

 if cookies is not None:
 tmp_cookie_jar = CookieJar()
 tmp_cookie_jar.update_cookies(cookies)
 req_cookies = tmp_cookie_jar.filter_cookies(url)
 if req_cookies:
-session_cookies.load(req_cookies)
+all_cookies.load(req_cookies)

-cookies = session_cookies
-
 if proxy is not None:
 proxy = URL(proxy)
@@ -457,7 +464,7 @@ class ClientSession:
 req = self._request_class(
 method, url, params=params, headers=headers,
 skip_auto_headers=skip_headers, data=data,
-cookies=cookies, auth=auth, version=version,
+cookies=all_cookies, auth=auth, version=version,
 compress=compress, chunked=chunked,
 expect100=expect100, loop=self._loop,
 response_class=self._response_class,
@@ -1040,8 +1047,13 @@ class _SessionRequestContextManager:
 self._session = session

 async def __aenter__(self) -> ClientResponse:
-self._resp = await self._coro
+try:
-return self._resp
+self._resp = await self._coro
+except BaseException:
+await self._session.close()
+raise
+else:
+return self._resp

 async def __aexit__(self,
 exc_type: Optional[Type[BaseException]],
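A minimal sketch (URL illustrative) of what the reworked __aenter__ above guarantees: if awaiting the request fails, the implicit session is closed before the error propagates instead of being leaked.

    import aiohttp

    async def fetch_status() -> int:
        # aiohttp.request(...) yields a _SessionRequestContextManager; on failure
        # the temporary session is now closed before the exception is re-raised
        async with aiohttp.request('GET', 'http://example.invalid/') as resp:
            return resp.status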
@@ -72,8 +72,21 @@ class ClientResponseError(ClientError):
 self.message = message
 self.headers = headers
 self.history = history
+self.args = (request_info, history)

-super().__init__("%s, message='%s'" % (self.status, message))
+def __str__(self) -> str:
+return ("%s, message=%r, url=%r" %
+(self.status, self.message, self.request_info.real_url))
+
+def __repr__(self) -> str:
+args = "%r, %r" % (self.request_info, self.history)
+if self.status != 0:
+args += ", status=%r" % (self.status,)
+if self.message != '':
+args += ", message=%r" % (self.message,)
+if self.headers is not None:
+args += ", headers=%r" % (self.headers,)
+return "%s(%s)" % (type(self).__name__, args)

 @property
 def code(self) -> int:
@@ -130,6 +143,7 @@ class ClientConnectorError(ClientOSError):
 self._conn_key = connection_key
 self._os_error = os_error
 super().__init__(os_error.errno, os_error.strerror)
+self.args = (connection_key, os_error)

 @property
 def os_error(self) -> OSError:
@@ -148,8 +162,12 @@ class ClientConnectorError(ClientOSError):
 return self._conn_key.ssl

 def __str__(self) -> str:
-return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]'
+return ('Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]'
-.format(self, self.strerror))
+.format(self, self.ssl if self.ssl is not None else 'default',
+self.strerror))

+# OSError.__reduce__ does too much black magick
+__reduce__ = BaseException.__reduce__
+

 class ClientProxyConnectionError(ClientConnectorError):
@@ -169,6 +187,10 @@ class ServerDisconnectedError(ServerConnectionError):

 def __init__(self, message: Optional[str]=None) -> None:
 self.message = message
+if message is None:
+self.args = ()
+else:
+self.args = (message,)


 class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
@@ -184,9 +206,10 @@ class ServerFingerprintMismatch(ServerConnectionError):
 self.got = got
 self.host = host
 self.port = port
+self.args = (expected, got, host, port)

 def __repr__(self) -> str:
-return '<{} expected={} got={} host={} port={}>'.format(
+return '<{} expected={!r} got={!r} host={!r} port={!r}>'.format(
 self.__class__.__name__, self.expected, self.got,
 self.host, self.port)

@@ -245,6 +268,7 @@ class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore
 ConnectionKey, certificate_error: Exception) -> None:
 self._conn_key = connection_key
 self._certificate_error = certificate_error
+self.args = (connection_key, certificate_error)

 @property
 def certificate_error(self) -> Exception:
@@ -176,6 +176,8 @@ class ResponseHandler(BaseProtocol,
 self._payload.set_exception(exc)

 def data_received(self, data: bytes) -> None:
+self._reschedule_timeout()
+
 if not data:
 return

@@ -220,7 +220,6 @@ class ClientRequest:
 body = b''
 auth = None
 response = None
-response_class = None

 _writer = None # async task for streaming data
 _continue = None # waiter future for '100 Continue' response
@@ -367,7 +366,7 @@ class ClientRequest:
 netloc = cast(str, self.url.raw_host)
 if helpers.is_ipv6_address(netloc):
 netloc = '[{}]'.format(netloc)
-if not self.url.is_default_port():
+if self.url.port is not None and not self.url.is_default_port():
 netloc += ':' + str(self.url.port)
 self.headers[hdrs.HOST] = netloc

@@ -413,7 +412,7 @@ class ClientRequest:
 if isinstance(value, Morsel):
 # Preserve coded_value
 mrsl_val = value.get(value.key, Morsel())
-mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa
+mrsl_val.set(value.key, value.value, value.coded_value)
 c[name] = mrsl_val
 else:
 c[name] = value # type: ignore
@@ -580,7 +579,11 @@ class ClientRequest:
 # - not CONNECT proxy must send absolute form URI
 # - most common is origin form URI
 if self.method == hdrs.METH_CONNECT:
-path = '{}:{}'.format(self.url.raw_host, self.url.port)
+connect_host = self.url.raw_host
+assert connect_host is not None
+if helpers.is_ipv6_address(connect_host):
+connect_host = '[{}]'.format(connect_host)
+path = '{}:{}'.format(connect_host, self.url.port)
 elif self.proxy and not self.is_ssl():
 path = str(self.url)
 else:
@@ -932,7 +935,8 @@ class ClientResponse(HeadersMixin):

 def raise_for_status(self) -> None:
 if 400 <= self.status:
-assert self.reason # always not None for started response
+# reason should always be not None for a started response
+assert self.reason is not None
 self.release()
 raise ClientResponseError(
 self.request_info,
@@ -64,7 +64,8 @@ except ImportError:  # pragma: no cover
 SSLContext = object  # type: ignore


-__all__ = ('BaseConnector', 'TCPConnector', 'UnixConnector')
+__all__ = ('BaseConnector', 'TCPConnector', 'UnixConnector',
+'NamedPipeConnector')


 if TYPE_CHECKING:  # pragma: no cover
@@ -716,7 +717,7 @@ class TCPConnector(BaseConnector):
 family: int=0,
 ssl_context: Optional[SSLContext]=None,
 ssl: Union[None, bool, Fingerprint, SSLContext]=None,
-local_addr: Optional[str]=None,
+local_addr: Optional[Tuple[str, int]]=None,
 resolver: Optional[AbstractResolver]=None,
 keepalive_timeout: Union[None, float, object]=sentinel,
 force_close: bool=False,
@@ -797,24 +798,28 @@ class TCPConnector(BaseConnector):

 if (key in self._cached_hosts) and \
 (not self._cached_hosts.expired(key)):
+# get result early, before any await (#4014)
+result = self._cached_hosts.next_addrs(key)
+
 if traces:
 for trace in traces:
 await trace.send_dns_cache_hit(host)
+return result
-return self._cached_hosts.next_addrs(key)

 if key in self._throttle_dns_events:
+# get event early, before any await (#4014)
+event = self._throttle_dns_events[key]
 if traces:
 for trace in traces:
 await trace.send_dns_cache_hit(host)
-await self._throttle_dns_events[key].wait()
+await event.wait()
 else:
+# update dict early, before any await (#4014)
+self._throttle_dns_events[key] = \
+EventResultOrError(self._loop)
 if traces:
 for trace in traces:
 await trace.send_dns_cache_miss(host)
-self._throttle_dns_events[key] = \
-EventResultOrError(self._loop)
 try:

 if traces:
@@ -864,7 +869,16 @@ class TCPConnector(BaseConnector):
 sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
 sslcontext.options |= ssl.OP_NO_SSLv2
 sslcontext.options |= ssl.OP_NO_SSLv3
-sslcontext.options |= ssl.OP_NO_COMPRESSION
+try:
+sslcontext.options |= ssl.OP_NO_COMPRESSION
+except AttributeError as attr_err:
+warnings.warn(
+'{!s}: The Python interpreter is compiled '
+'against OpenSSL < 1.0.0. Ref: '
+'https://docs.python.org/3/library/ssl.html'
+'#ssl.OP_NO_COMPRESSION'.
+format(attr_err),
+)
 sslcontext.set_default_verify_paths()
 return sslcontext

@@ -919,9 +933,7 @@ class TCPConnector(BaseConnector):
 **kwargs: Any) -> Tuple[asyncio.Transport, ResponseHandler]:
 try:
 with CeilTimeout(timeout.sock_connect):
-return cast(
+return await self._loop.create_connection(*args, **kwargs)  # type: ignore # noqa
-Tuple[asyncio.Transport, ResponseHandler],
-await self._loop.create_connection(*args, **kwargs))
 except cert_errors as exc:
 raise ClientConnectorCertificateError(
 req.connection_key, exc) from exc
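The "(#4014)" comments above follow a general asyncio rule: read shared state before the first await, so a concurrent task cannot swap it out while the coroutine is suspended. A generic sketch of that pattern, under assumed names:

    import asyncio
    from typing import Dict

    events: Dict[str, asyncio.Event] = {}

    async def wait_for(key: str) -> None:
        # capture the event before awaiting; another task may replace or
        # delete events[key] while this coroutine is suspended
        event = events[key]
        await event.wait()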
@@ -1126,3 +1138,56 @@ class UnixConnector(BaseConnector):
 raise ClientConnectorError(req.connection_key, exc) from exc

 return cast(ResponseHandler, proto)
+
+
+class NamedPipeConnector(BaseConnector):
+"""Named pipe connector.
+
+Only supported by the proactor event loop.
+See also: https://docs.python.org/3.7/library/asyncio-eventloop.html
+
+path - Windows named pipe path.
+keepalive_timeout - (optional) Keep-alive timeout.
+force_close - Set to True to force close and do reconnect
+after each request (and between redirects).
+limit - The total number of simultaneous connections.
+limit_per_host - Number of simultaneous connections to one host.
+loop - Optional event loop.
+"""
+
+def __init__(self, path: str, force_close: bool=False,
+keepalive_timeout: Union[object, float, None]=sentinel,
+limit: int=100, limit_per_host: int=0,
+loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
+super().__init__(force_close=force_close,
+keepalive_timeout=keepalive_timeout,
+limit=limit, limit_per_host=limit_per_host, loop=loop)
+if not isinstance(self._loop, asyncio.ProactorEventLoop):  # type: ignore # noqa
+raise RuntimeError("Named Pipes only available in proactor "
+"loop under windows")
+self._path = path
+
+@property
+def path(self) -> str:
+"""Path to the named pipe."""
+return self._path
+
+async def _create_connection(self, req: 'ClientRequest',
+traces: List['Trace'],
+timeout: 'ClientTimeout') -> ResponseHandler:
+try:
+with CeilTimeout(timeout.sock_connect):
+_, proto = await self._loop.create_pipe_connection(  # type: ignore # noqa
+self._factory, self._path
+)
+# the drain is required so that the connection_made is called
+# and transport is set otherwise it is not set before the
+# `assert conn.transport is not None`
+# in client.py's _request method
+await asyncio.sleep(0)
+# other option is to manually set transport like
+# `proto.transport = trans`
+except OSError as exc:
+raise ClientConnectorError(req.connection_key, exc) from exc
+
+return cast(ResponseHandler, proto)
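A minimal usage sketch for the new NamedPipeConnector (Windows only, proactor event loop required, Python 3.7+); the pipe name below is hypothetical:

    import asyncio
    import aiohttp

    async def main() -> None:
        # hypothetical pipe name; a server must already be listening on it
        conn = aiohttp.NamedPipeConnector(r'\\.\pipe\my-aiohttp-pipe')
        async with aiohttp.ClientSession(connector=conn) as session:
            async with session.get('http://localhost/') as resp:
                print(resp.status)

    if __name__ == '__main__':
        # named pipes need the proactor loop on Windows
        asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
        asyncio.run(main())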
@@ -6,7 +6,6 @@ import pickle
 import re
 from collections import defaultdict
 from http.cookies import BaseCookie, Morsel, SimpleCookie  # noqa
-from math import ceil
 from typing import (  # noqa
 DefaultDict,
 Dict,
@@ -23,7 +22,7 @@ from typing import (  # noqa
 from yarl import URL

 from .abc import AbstractCookieJar
-from .helpers import is_ip_address
+from .helpers import is_ip_address, next_whole_second
 from .typedefs import LooseCookies, PathLike

 __all__ = ('CookieJar', 'DummyCookieJar')
@@ -48,7 +47,8 @@ class CookieJar(AbstractCookieJar):

 DATE_YEAR_RE = re.compile(r"(\d{2,4})")

-MAX_TIME = 2051215261.0  # so far in future (2035-01-01)
+MAX_TIME = datetime.datetime.max.replace(
+tzinfo=datetime.timezone.utc)

 def __init__(self, *, unsafe: bool=False,
 loop: Optional[asyncio.AbstractEventLoop]=None) -> None:
@@ -56,8 +56,8 @@ class CookieJar(AbstractCookieJar):
 self._cookies = defaultdict(SimpleCookie) #type: DefaultDict[str, SimpleCookie] # noqa
 self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
 self._unsafe = unsafe
-self._next_expiration = ceil(self._loop.time())
+self._next_expiration = next_whole_second()
-self._expirations = {}  # type: Dict[Tuple[str, str], int]
+self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]  # noqa: E501

 def save(self, file_path: PathLike) -> None:
 file_path = pathlib.Path(file_path)
@@ -72,7 +72,7 @@ class CookieJar(AbstractCookieJar):
 def clear(self) -> None:
 self._cookies.clear()
 self._host_only_cookies.clear()
-self._next_expiration = ceil(self._loop.time())
+self._next_expiration = next_whole_second()
 self._expirations.clear()

 def __iter__(self) -> 'Iterator[Morsel[str]]':
@@ -84,7 +84,7 @@ class CookieJar(AbstractCookieJar):
 return sum(1 for i in self)

 def _do_expiration(self) -> None:
-now = self._loop.time()
+now = datetime.datetime.now(datetime.timezone.utc)
 if self._next_expiration > now:
 return
 if not self._expirations:
@@ -103,12 +103,16 @@ class CookieJar(AbstractCookieJar):
 for key in to_del:
 del expirations[key]

-self._next_expiration = ceil(next_expiration)
+try:
+self._next_expiration = (next_expiration.replace(microsecond=0) +
+datetime.timedelta(seconds=1))
+except OverflowError:
+self._next_expiration = self.MAX_TIME

-def _expire_cookie(self, when: float, domain: str, name: str) -> None:
+def _expire_cookie(self, when: datetime.datetime, domain: str, name: str
-iwhen = int(when)
+) -> None:
-self._next_expiration = min(self._next_expiration, iwhen)
+self._next_expiration = min(self._next_expiration, when)
-self._expirations[(domain, name)] = iwhen
+self._expirations[(domain, name)] = when

 def update_cookies(self,
 cookies: LooseCookies,
@@ -166,7 +170,13 @@ class CookieJar(AbstractCookieJar):
 if max_age:
 try:
 delta_seconds = int(max_age)
-self._expire_cookie(self._loop.time() + delta_seconds,
+try:
+max_age_expiration = (
+datetime.datetime.now(datetime.timezone.utc) +
+datetime.timedelta(seconds=delta_seconds))
+except OverflowError:
+max_age_expiration = self.MAX_TIME
+self._expire_cookie(max_age_expiration,
 domain, name)
 except ValueError:
 cookie["max-age"] = ""
@@ -176,7 +186,7 @@ class CookieJar(AbstractCookieJar):
 if expires:
 expire_time = self._parse_date(expires)
 if expire_time:
-self._expire_cookie(expire_time.timestamp(),
+self._expire_cookie(expire_time,
 domain, name)
 else:
 cookie["expires"] = ""
@@ -299,6 +309,7 @@ class CookieJar(AbstractCookieJar):
 month_match = cls.DATE_MONTH_RE.match(token)
 if month_match:
 found_month = True
+assert month_match.lastindex is not None
 month = month_match.lastindex
 continue

@@ -1,5 +1,14 @@
-from typing import (Generic, Iterable, Iterator, List, MutableSequence,
+from typing import (
-Optional, TypeVar, Union, overload)
+Generic,
+Iterable,
+Iterator,
+List,
+MutableSequence,
+Optional,
+TypeVar,
+Union,
+overload,
+)

 _T = TypeVar('_T')
 _Arg = Union[List[_T], Iterable[_T]]
@@ -7,7 +16,7 @@ _Arg = Union[List[_T], Iterable[_T]]

 class FrozenList(MutableSequence[_T], Generic[_T]):

-def __init__(self, items: Optional[_Arg[_T]]=None) -> None: ...
+def __init__(self, items: Optional[_Arg[_T]]=...) -> None: ...

 @property
 def frozen(self) -> bool: ...
@@ -4,6 +4,7 @@ import asyncio
 import base64
 import binascii
 import cgi
+import datetime
 import functools
 import inspect
 import netrc
@@ -52,6 +53,7 @@ __all__ = ('BasicAuth', 'ChainMapProxy')

 PY_36 = sys.version_info >= (3, 6)
 PY_37 = sys.version_info >= (3, 7)
+PY_38 = sys.version_info >= (3, 8)

 if not PY_37:
 import idna_ssl
@@ -66,7 +68,7 @@ except ImportError:
 def all_tasks(
 loop: Optional[asyncio.AbstractEventLoop] = None
 ) -> Set['asyncio.Task[Any]']:
-tasks = list(asyncio.Task.all_tasks(loop))  # type: ignore
+tasks = list(asyncio.Task.all_tasks(loop))
 return {t for t in tasks if not t.done()}


@@ -257,7 +259,7 @@ def current_task(loop: Optional[asyncio.AbstractEventLoop]=None) -> asyncio.Task
 if PY_37:
 return asyncio.current_task(loop=loop)  # type: ignore
 else:
-return asyncio.Task.current_task(loop=loop)  # type: ignore
+return asyncio.Task.current_task(loop=loop)


 def get_running_loop(
@@ -428,7 +430,7 @@ _ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE)

 def _is_ip_address(
 regex: Pattern[str], regexb: Pattern[bytes],
-host: Optional[Union[str, bytes]])-> bool:
+host: Optional[Union[str, bytes]]) -> bool:
 if host is None:
 return False
 if isinstance(host, str):
@@ -449,8 +451,17 @@ def is_ip_address(
 return is_ipv4_address(host) or is_ipv6_address(host)


-_cached_current_datetime = None
+def next_whole_second() -> datetime.datetime:
-_cached_formatted_datetime = None
+"""Return current time rounded up to the next whole second."""
+return (
+datetime.datetime.now(
+datetime.timezone.utc).replace(microsecond=0) +
+datetime.timedelta(seconds=0)
+)
+
+
+_cached_current_datetime = None  # type: Optional[int]
+_cached_formatted_datetime = ""


 def rfc822_formatted_time() -> str:
|
|||||||
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
|
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
|
||||||
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
|
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
|
||||||
|
|
||||||
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now) # type: ignore # noqa
|
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
|
||||||
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
|
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
|
||||||
_weekdayname[wd], day, _monthname[month], year, hh, mm, ss
|
_weekdayname[wd], day, _monthname[month], year, hh, mm, ss
|
||||||
)
|
)
|
||||||
_cached_current_datetime = now
|
_cached_current_datetime = now
|
||||||
return _cached_formatted_datetime # type: ignore
|
return _cached_formatted_datetime
|
||||||
|
|
||||||
|
|
||||||
def _weakref_handle(info): # type: ignore
|
def _weakref_handle(info): # type: ignore
|
||||||
|
@ -3,34 +3,28 @@ import sys
|
|||||||
from typing import Mapping, Tuple # noqa
|
from typing import Mapping, Tuple # noqa
|
||||||
|
|
||||||
from . import __version__
|
from . import __version__
|
||||||
from .http_exceptions import HttpProcessingError
|
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
||||||
from .http_parser import (
|
from .http_parser import HeadersParser as HeadersParser
|
||||||
HeadersParser,
|
from .http_parser import HttpParser as HttpParser
|
||||||
HttpParser,
|
from .http_parser import HttpRequestParser as HttpRequestParser
|
||||||
HttpRequestParser,
|
from .http_parser import HttpResponseParser as HttpResponseParser
|
||||||
HttpResponseParser,
|
from .http_parser import RawRequestMessage as RawRequestMessage
|
||||||
RawRequestMessage,
|
from .http_parser import RawResponseMessage as RawResponseMessage
|
||||||
RawResponseMessage,
|
from .http_websocket import WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE
|
||||||
)
|
from .http_websocket import WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE
|
||||||
from .http_websocket import (
|
from .http_websocket import WS_KEY as WS_KEY
|
||||||
WS_CLOSED_MESSAGE,
|
from .http_websocket import WebSocketError as WebSocketError
|
||||||
WS_CLOSING_MESSAGE,
|
from .http_websocket import WebSocketReader as WebSocketReader
|
||||||
WS_KEY,
|
from .http_websocket import WebSocketWriter as WebSocketWriter
|
||||||
WebSocketError,
|
from .http_websocket import WSCloseCode as WSCloseCode
|
||||||
WebSocketReader,
|
from .http_websocket import WSMessage as WSMessage
|
||||||
WebSocketWriter,
|
from .http_websocket import WSMsgType as WSMsgType
|
||||||
WSCloseCode,
|
from .http_websocket import ws_ext_gen as ws_ext_gen
|
||||||
WSMessage,
|
from .http_websocket import ws_ext_parse as ws_ext_parse
|
||||||
WSMsgType,
|
from .http_writer import HttpVersion as HttpVersion
|
||||||
ws_ext_gen,
|
from .http_writer import HttpVersion10 as HttpVersion10
|
||||||
ws_ext_parse,
|
from .http_writer import HttpVersion11 as HttpVersion11
|
||||||
)
|
from .http_writer import StreamWriter as StreamWriter
|
||||||
from .http_writer import (
|
|
||||||
HttpVersion,
|
|
||||||
HttpVersion10,
|
|
||||||
HttpVersion11,
|
|
||||||
StreamWriter,
|
|
||||||
)
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE',
|
'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE',
|
||||||
|
@ -31,7 +31,11 @@ class HttpProcessingError(Exception):
|
|||||||
self.headers = headers
|
self.headers = headers
|
||||||
self.message = message
|
self.message = message
|
||||||
|
|
||||||
super().__init__("%s, message='%s'" % (self.code, message))
|
def __str__(self) -> str:
|
||||||
|
return "%s, message=%r" % (self.code, self.message)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return "<%s: %s>" % (self.__class__.__name__, self)
|
||||||
|
|
||||||
|
|
||||||
class BadHttpMessage(HttpProcessingError):
|
class BadHttpMessage(HttpProcessingError):
|
||||||
@ -42,6 +46,7 @@ class BadHttpMessage(HttpProcessingError):
|
|||||||
def __init__(self, message: str, *,
|
def __init__(self, message: str, *,
|
||||||
headers: Optional[_CIMultiDict]=None) -> None:
|
headers: Optional[_CIMultiDict]=None) -> None:
|
||||||
super().__init__(message=message, headers=headers)
|
super().__init__(message=message, headers=headers)
|
||||||
|
self.args = (message,)
|
||||||
|
|
||||||
|
|
||||||
class HttpBadRequest(BadHttpMessage):
|
class HttpBadRequest(BadHttpMessage):
|
||||||
@ -74,6 +79,7 @@ class LineTooLong(BadHttpMessage):
|
|||||||
super().__init__(
|
super().__init__(
|
||||||
"Got more than %s bytes (%s) when reading %s." % (
|
"Got more than %s bytes (%s) when reading %s." % (
|
||||||
limit, actual_size, line))
|
limit, actual_size, line))
|
||||||
|
self.args = (line, limit, actual_size)
|
||||||
|
|
||||||
|
|
||||||
class InvalidHeader(BadHttpMessage):
|
class InvalidHeader(BadHttpMessage):
|
||||||
@ -83,16 +89,20 @@ class InvalidHeader(BadHttpMessage):
|
|||||||
hdr = hdr.decode('utf-8', 'surrogateescape')
|
hdr = hdr.decode('utf-8', 'surrogateescape')
|
||||||
super().__init__('Invalid HTTP Header: {}'.format(hdr))
|
super().__init__('Invalid HTTP Header: {}'.format(hdr))
|
||||||
self.hdr = hdr
|
self.hdr = hdr
|
||||||
|
self.args = (hdr,)
|
||||||
|
|
||||||
|
|
||||||
class BadStatusLine(BadHttpMessage):
|
class BadStatusLine(BadHttpMessage):
|
||||||
|
|
||||||
def __init__(self, line: str='') -> None:
|
def __init__(self, line: str='') -> None:
|
||||||
if not line:
|
if not isinstance(line, str):
|
||||||
line = repr(line)
|
line = repr(line)
|
||||||
self.args = line,
|
self.args = (line,)
|
||||||
self.line = line
|
self.line = line
|
||||||
|
|
||||||
|
__str__ = Exception.__str__
|
||||||
|
__repr__ = Exception.__repr__
|
||||||
|
|
||||||
|
|
||||||
class InvalidURLError(BadHttpMessage):
|
class InvalidURLError(BadHttpMessage):
|
||||||
pass
|
pass
|
||||||
|
@ -82,8 +82,8 @@ _WSMessageBase = collections.namedtuple('_WSMessageBase',
|
|||||||
|
|
||||||
class WSMessage(_WSMessageBase):
|
class WSMessage(_WSMessageBase):
|
||||||
|
|
||||||
def json(self, *, # type: ignore
|
def json(self, *,
|
||||||
loads: Callable[[Any], Any]=json.loads) -> None:
|
loads: Callable[[Any], Any]=json.loads) -> Any:
|
||||||
"""Return parsed JSON data.
|
"""Return parsed JSON data.
|
||||||
|
|
||||||
.. versionadded:: 0.22
|
.. versionadded:: 0.22
|
||||||
@ -100,7 +100,10 @@ class WebSocketError(Exception):
|
|||||||
|
|
||||||
def __init__(self, code: int, message: str) -> None:
|
def __init__(self, code: int, message: str) -> None:
|
||||||
self.code = code
|
self.code = code
|
||||||
super().__init__(message)
|
super().__init__(code, message)
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return self.args[1]
|
||||||
|
|
||||||
|
|
||||||
class WSHandshakeError(Exception):
|
class WSHandshakeError(Exception):
|
||||||
@ -365,9 +368,12 @@ class WebSocketReader:
|
|||||||
left = len(self._decompressobj.unconsumed_tail)
|
left = len(self._decompressobj.unconsumed_tail)
|
||||||
raise WebSocketError(
|
raise WebSocketError(
|
||||||
WSCloseCode.MESSAGE_TOO_BIG,
|
WSCloseCode.MESSAGE_TOO_BIG,
|
||||||
"Decompressed message size exceeds limit {}".
|
"Decompressed message size {} exceeds limit {}"
|
||||||
format(self._max_msg_size + left,
|
.format(
|
||||||
self._max_msg_size))
|
self._max_msg_size + left,
|
||||||
|
self._max_msg_size
|
||||||
|
)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
payload_merged = bytes(self._partial)
|
payload_merged = bytes(self._partial)
|
||||||
|
|
||||||
|
@ -19,7 +19,6 @@ from typing import ( # noqa
|
|||||||
Tuple,
|
Tuple,
|
||||||
Type,
|
Type,
|
||||||
Union,
|
Union,
|
||||||
cast,
|
|
||||||
)
|
)
|
||||||
from urllib.parse import parse_qsl, unquote, urlencode
|
from urllib.parse import parse_qsl, unquote, urlencode
|
||||||
|
|
||||||
@ -195,21 +194,26 @@ def content_disposition_filename(params: Mapping[str, str],
|
|||||||
|
|
||||||
|
|
||||||
class MultipartResponseWrapper:
|
class MultipartResponseWrapper:
|
||||||
"""Wrapper around the MultipartBodyReader.
|
"""Wrapper around the MultipartReader.
|
||||||
|
|
||||||
It takes care about
|
It takes care about
|
||||||
underlying connection and close it when it needs in.
|
underlying connection and close it when it needs in.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, resp: 'ClientResponse', stream: Any) -> None:
|
def __init__(
|
||||||
# TODO: add strong annotation to stream
|
self,
|
||||||
|
resp: 'ClientResponse',
|
||||||
|
stream: 'MultipartReader',
|
||||||
|
) -> None:
|
||||||
self.resp = resp
|
self.resp = resp
|
||||||
self.stream = stream
|
self.stream = stream
|
||||||
|
|
||||||
def __aiter__(self) -> 'MultipartResponseWrapper':
|
def __aiter__(self) -> 'MultipartResponseWrapper':
|
||||||
return self
|
return self
|
||||||
|
|
||||||
async def __anext__(self) -> Any:
|
async def __anext__(
|
||||||
|
self,
|
||||||
|
) -> Union['MultipartReader', 'BodyPartReader']:
|
||||||
part = await self.next()
|
part = await self.next()
|
||||||
if part is None:
|
if part is None:
|
||||||
raise StopAsyncIteration # NOQA
|
raise StopAsyncIteration # NOQA
|
||||||
@ -219,7 +223,9 @@ class MultipartResponseWrapper:
|
|||||||
"""Returns True when all response data had been read."""
|
"""Returns True when all response data had been read."""
|
||||||
return self.resp.content.at_eof()
|
return self.resp.content.at_eof()
|
||||||
|
|
||||||
async def next(self) -> Any:
|
async def next(
|
||||||
|
self,
|
||||||
|
) -> Optional[Union['MultipartReader', 'BodyPartReader']]:
|
||||||
"""Emits next multipart reader object."""
|
"""Emits next multipart reader object."""
|
||||||
item = await self.stream.next()
|
item = await self.stream.next()
|
||||||
if self.stream.at_eof():
|
if self.stream.at_eof():
|
||||||
@ -238,7 +244,7 @@ class BodyPartReader:
|
|||||||
chunk_size = 8192
|
chunk_size = 8192
|
||||||
|
|
||||||
def __init__(self, boundary: bytes,
|
def __init__(self, boundary: bytes,
|
||||||
headers: Mapping[str, Optional[str]],
|
headers: 'CIMultiDictProxy[str]',
|
||||||
content: StreamReader) -> None:
|
content: StreamReader) -> None:
|
||||||
self.headers = headers
|
self.headers = headers
|
||||||
self._boundary = boundary
|
self._boundary = boundary
|
||||||
@ -256,19 +262,19 @@ class BodyPartReader:
|
|||||||
def __aiter__(self) -> 'BodyPartReader':
|
def __aiter__(self) -> 'BodyPartReader':
|
||||||
return self
|
return self
|
||||||
|
|
||||||
async def __anext__(self) -> Any:
|
async def __anext__(self) -> bytes:
|
||||||
part = await self.next()
|
part = await self.next()
|
||||||
if part is None:
|
if part is None:
|
||||||
raise StopAsyncIteration # NOQA
|
raise StopAsyncIteration # NOQA
|
||||||
return part
|
return part
|
||||||
|
|
||||||
async def next(self) -> Any:
|
async def next(self) -> Optional[bytes]:
|
||||||
item = await self.read()
|
item = await self.read()
|
||||||
if not item:
|
if not item:
|
||||||
return None
|
return None
|
||||||
return item
|
return item
|
||||||
|
|
||||||
async def read(self, *, decode: bool=False) -> Any:
|
async def read(self, *, decode: bool=False) -> bytes:
|
||||||
"""Reads body part data.
|
"""Reads body part data.
|
||||||
|
|
||||||
decode: Decodes data following by encoding
|
decode: Decodes data following by encoding
|
||||||
@ -392,7 +398,9 @@ class BodyPartReader:
|
|||||||
encoding = encoding or self.get_charset(default='utf-8')
|
encoding = encoding or self.get_charset(default='utf-8')
|
||||||
return data.decode(encoding)
|
return data.decode(encoding)
|
||||||
|
|
||||||
async def json(self, *, encoding: Optional[str]=None) -> Any:
|
async def json(self,
|
||||||
|
*,
|
||||||
|
encoding: Optional[str]=None) -> Optional[Dict[str, Any]]:
|
||||||
"""Like read(), but assumes that body parts contains JSON data."""
|
"""Like read(), but assumes that body parts contains JSON data."""
|
||||||
data = await self.read(decode=True)
|
data = await self.read(decode=True)
|
||||||
if not data:
|
if not data:
|
||||||
@ -431,7 +439,7 @@ class BodyPartReader:
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
def _decode_content(self, data: bytes) -> bytes:
|
def _decode_content(self, data: bytes) -> bytes:
|
||||||
encoding = cast(str, self.headers[CONTENT_ENCODING]).lower()
|
encoding = self.headers.get(CONTENT_ENCODING, '').lower()
|
||||||
|
|
||||||
if encoding == 'deflate':
|
if encoding == 'deflate':
|
||||||
return zlib.decompress(data, -zlib.MAX_WBITS)
|
return zlib.decompress(data, -zlib.MAX_WBITS)
|
||||||
@ -443,7 +451,7 @@ class BodyPartReader:
|
|||||||
raise RuntimeError('unknown content encoding: {}'.format(encoding))
|
raise RuntimeError('unknown content encoding: {}'.format(encoding))
|
||||||
|
|
||||||
def _decode_content_transfer(self, data: bytes) -> bytes:
|
def _decode_content_transfer(self, data: bytes) -> bytes:
|
||||||
encoding = cast(str, self.headers[CONTENT_TRANSFER_ENCODING]).lower()
|
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, '').lower()
|
||||||
|
|
||||||
if encoding == 'base64':
|
if encoding == 'base64':
|
||||||
return base64.b64decode(data)
|
return base64.b64decode(data)
|
||||||
@ -521,7 +529,7 @@ class MultipartReader:
|
|||||||
self.headers = headers
|
self.headers = headers
|
||||||
self._boundary = ('--' + self._get_boundary()).encode()
|
self._boundary = ('--' + self._get_boundary()).encode()
|
||||||
self._content = content
|
self._content = content
|
||||||
self._last_part = None
|
self._last_part = None # type: Optional[Union['MultipartReader', BodyPartReader]] # noqa
|
||||||
self._at_eof = False
|
self._at_eof = False
|
||||||
self._at_bof = True
|
self._at_bof = True
|
||||||
self._unread = [] # type: List[bytes]
|
self._unread = [] # type: List[bytes]
|
||||||
@ -529,14 +537,19 @@ class MultipartReader:
|
|||||||
def __aiter__(self) -> 'MultipartReader':
|
def __aiter__(self) -> 'MultipartReader':
|
||||||
return self
|
return self
|
||||||
|
|
||||||
async def __anext__(self) -> Any:
|
async def __anext__(
|
||||||
|
self,
|
||||||
|
) -> Union['MultipartReader', BodyPartReader]:
|
||||||
part = await self.next()
|
part = await self.next()
|
||||||
if part is None:
|
if part is None:
|
||||||
raise StopAsyncIteration # NOQA
|
raise StopAsyncIteration # NOQA
|
||||||
return part
|
return part
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_response(cls, response: 'ClientResponse') -> Any:
|
def from_response(
|
||||||
|
cls,
|
||||||
|
response: 'ClientResponse',
|
||||||
|
) -> MultipartResponseWrapper:
|
||||||
"""Constructs reader instance from HTTP response.
|
"""Constructs reader instance from HTTP response.
|
||||||
|
|
||||||
:param response: :class:`~aiohttp.client.ClientResponse` instance
|
:param response: :class:`~aiohttp.client.ClientResponse` instance
|
||||||
@ -551,11 +564,13 @@ class MultipartReader:
|
|||||||
"""
|
"""
|
||||||
return self._at_eof
|
return self._at_eof
|
||||||
|
|
||||||
async def next(self) -> Any:
|
async def next(
|
||||||
|
self,
|
||||||
|
) -> Optional[Union['MultipartReader', BodyPartReader]]:
|
||||||
"""Emits the next multipart body part."""
|
"""Emits the next multipart body part."""
|
||||||
# So, if we're at BOF, we need to skip till the boundary.
|
# So, if we're at BOF, we need to skip till the boundary.
|
||||||
if self._at_eof:
|
if self._at_eof:
|
||||||
return
|
return None
|
||||||
await self._maybe_release_last_part()
|
await self._maybe_release_last_part()
|
||||||
if self._at_bof:
|
if self._at_bof:
|
||||||
await self._read_until_first_boundary()
|
await self._read_until_first_boundary()
|
||||||
@ -563,7 +578,7 @@ class MultipartReader:
|
|||||||
else:
|
else:
|
||||||
await self._read_boundary()
|
await self._read_boundary()
|
||||||
if self._at_eof: # we just read the last boundary, nothing to do there
|
if self._at_eof: # we just read the last boundary, nothing to do there
|
||||||
return
|
return None
|
||||||
self._last_part = await self.fetch_next_part()
|
self._last_part = await self.fetch_next_part()
|
||||||
return self._last_part
|
return self._last_part
|
||||||
|
|
||||||
@ -575,12 +590,17 @@ class MultipartReader:
|
|||||||
break
|
break
|
||||||
await item.release()
|
await item.release()
|
||||||
|
|
||||||
async def fetch_next_part(self) -> Any:
|
async def fetch_next_part(
|
||||||
|
self,
|
||||||
|
) -> Union['MultipartReader', BodyPartReader]:
|
||||||
"""Returns the next body part reader."""
|
"""Returns the next body part reader."""
|
||||||
headers = await self._read_headers()
|
headers = await self._read_headers()
|
||||||
return self._get_part_reader(headers)
|
return self._get_part_reader(headers)
|
||||||
|
|
||||||
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any:
|
def _get_part_reader(
|
||||||
|
self,
|
||||||
|
headers: 'CIMultiDictProxy[str]',
|
||||||
|
) -> Union['MultipartReader', BodyPartReader]:
|
||||||
"""Dispatches the response by the `Content-Type` header, returning
|
"""Dispatches the response by the `Content-Type` header, returning
|
||||||
suitable reader instance.
|
suitable reader instance.
|
||||||
|
|
||||||
@ -718,6 +738,9 @@ class MultipartWriter(Payload):
|
|||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
return len(self._parts)
|
return len(self._parts)
|
||||||
|
|
||||||
|
def __bool__(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
_valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
|
_valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
|
||||||
_invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
|
_invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
|
||||||
|
|
||||||
@ -760,7 +783,7 @@ class MultipartWriter(Payload):
|
|||||||
def append(
|
def append(
|
||||||
self,
|
self,
|
||||||
obj: Any,
|
obj: Any,
|
||||||
headers: Optional['MultiMapping[str]']=None
|
headers: Optional[MultiMapping[str]]=None
|
||||||
) -> Payload:
|
) -> Payload:
|
||||||
if headers is None:
|
if headers is None:
|
||||||
headers = CIMultiDict()
|
headers = CIMultiDict()
|
||||||
@ -779,7 +802,10 @@ class MultipartWriter(Payload):
|
     def append_payload(self, payload: Payload) -> Payload:
         """Adds a new body part to multipart writer."""
         # compression
-        encoding = payload.headers.get(CONTENT_ENCODING, '').lower()  # type: Optional[str]  # noqa
+        encoding = payload.headers.get(
+            CONTENT_ENCODING,
+            '',
+        ).lower()  # type: Optional[str]
         if encoding and encoding not in ('deflate', 'gzip', 'identity'):
             raise RuntimeError('unknown content encoding: {}'.format(encoding))
         if encoding == 'identity':
@@ -787,7 +813,9 @@ class MultipartWriter(Payload):

         # te encoding
         te_encoding = payload.headers.get(
-            CONTENT_TRANSFER_ENCODING, '').lower()  # type: Optional[str]  # noqa
+            CONTENT_TRANSFER_ENCODING,
+            '',
+        ).lower()  # type: Optional[str]
         if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
             raise RuntimeError('unknown content transfer encoding: {}'
                                ''.format(te_encoding))
@@ -805,7 +833,7 @@ class MultipartWriter(Payload):
     def append_json(
             self,
             obj: Any,
-            headers: Optional['MultiMapping[str]']=None
+            headers: Optional[MultiMapping[str]]=None
     ) -> Payload:
         """Helper to append JSON part."""
         if headers is None:
@@ -817,7 +845,7 @@ class MultipartWriter(Payload):
             self,
             obj: Union[Sequence[Tuple[str, str]],
                        Mapping[str, str]],
-            headers: Optional['MultiMapping[str]']=None
+            headers: Optional[MultiMapping[str]]=None
     ) -> Payload:
         """Helper to append form urlencoded part."""
         assert isinstance(obj, (Sequence, Mapping))
@@ -836,9 +864,6 @@ class MultipartWriter(Payload):
     @property
     def size(self) -> Optional[int]:
         """Size of the payload."""
-        if not self._parts:
-            return 0
-
         total = 0
         for part, encoding, te_encoding in self._parts:
             if encoding or te_encoding or part.size is None:
@@ -856,9 +881,6 @@ class MultipartWriter(Payload):
     async def write(self, writer: Any,
                     close_boundary: bool=True) -> None:
         """Write body."""
-        if not self._parts:
-            return
-
        for part, encoding, te_encoding in self._parts:
            await writer.write(b'--' + self._boundary + b'\r\n')
            await writer.write(part._binary_headers)
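The stricter header handling above is visible from the public API; a minimal usage sketch (not part of the commit, API names per the aiohttp docs):

import aiohttp

with aiohttp.MultipartWriter('form-data') as writer:
    # 'gzip' is one of the encodings accepted by append_payload()
    writer.append(b'compressed-bytes', {'Content-Encoding': 'gzip'})
    try:
        # anything outside deflate/gzip/identity is rejected
        writer.append(b'data', {'Content-Encoding': 'snappy'})
    except RuntimeError as exc:
        print(exc)  # unknown content encoding: snappy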
@@ -207,7 +207,7 @@ class BytesPayload(Payload):
                  *args: Any,
                  **kwargs: Any) -> None:
         if not isinstance(value, (bytes, bytearray, memoryview)):
-            raise TypeError("value argument must be byte-ish, not (!r)"
+            raise TypeError("value argument must be byte-ish, not {!r}"
                             .format(type(value)))

         if 'content_type' not in kwargs:
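A small illustration of the corrected message (sketch, not from the commit):

from aiohttp.payload import BytesPayload

try:
    BytesPayload('not bytes')  # str is rejected, only bytes-like values are allowed
except TypeError as exc:
    print(exc)  # value argument must be byte-ish, not <class 'str'>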
@@ -5,7 +5,7 @@ from collections.abc import Callable

 import pytest

-from aiohttp.helpers import isasyncgenfunction
+from aiohttp.helpers import PY_37, isasyncgenfunction
 from aiohttp.web import Application

 from .test_utils import (
@@ -161,7 +161,8 @@ def pytest_pyfunc_call(pyfuncitem):  # type: ignore
     """
     fast = pyfuncitem.config.getoption("--aiohttp-fast")
     if asyncio.iscoroutinefunction(pyfuncitem.function):
-        existing_loop = pyfuncitem.funcargs.get('loop', None)
+        existing_loop = pyfuncitem.funcargs.get('proactor_loop')\
+            or pyfuncitem.funcargs.get('loop', None)
         with _runtime_warning_context():
             with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                 testargs = {arg: pyfuncitem.funcargs[arg]
@@ -216,6 +217,20 @@ def loop(loop_factory, fast, loop_debug):  # type: ignore
     yield _loop


+@pytest.fixture
+def proactor_loop():  # type: ignore
+    if not PY_37:
+        policy = asyncio.get_event_loop_policy()
+        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore
+    else:
+        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore
+    asyncio.set_event_loop_policy(policy)
+
+    with loop_context(policy.new_event_loop) as _loop:
+        asyncio.set_event_loop(_loop)
+        yield _loop
+
+
 @pytest.fixture
 def unused_port(aiohttp_unused_port):  # type: ignore  # pragma: no cover
     warnings.warn("Deprecated, use aiohttp_unused_port fixture instead",
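Hypothetical test module showing how the new fixture is meant to be consumed: requesting proactor_loop makes pytest_pyfunc_call() above run the coroutine test on a proactor event loop (Windows only); the test name and assertion are illustrative.

import sys

import pytest


@pytest.mark.skipif(sys.platform != 'win32', reason='proactor loop exists only on Windows')
async def test_runs_on_proactor_loop(proactor_loop):
    # the fixture yields the loop it installed via loop_context()
    assert type(proactor_loop).__name__ == 'ProactorEventLoop'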
|
@ -2,7 +2,6 @@ from typing import Any, Generic, TypeVar
|
|||||||
|
|
||||||
from aiohttp.frozenlist import FrozenList
|
from aiohttp.frozenlist import FrozenList
|
||||||
|
|
||||||
|
|
||||||
__all__ = ('Signal',)
|
__all__ = ('Signal',)
|
||||||
|
|
||||||
|
|
||||||
|
@ -13,7 +13,6 @@ try: # pragma: no cover
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
from typing_extensions import Deque # noqa
|
from typing_extensions import Deque # noqa
|
||||||
|
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue',
|
'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue',
|
||||||
'FlowControlDataQueue')
|
'FlowControlDataQueue')
|
||||||
@ -122,8 +121,8 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
self._buffer = collections.deque() # type: Deque[bytes]
|
self._buffer = collections.deque() # type: Deque[bytes]
|
||||||
self._buffer_offset = 0
|
self._buffer_offset = 0
|
||||||
self._eof = False
|
self._eof = False
|
||||||
self._waiter = None # type: Optional[asyncio.Future[bool]]
|
self._waiter = None # type: Optional[asyncio.Future[None]]
|
||||||
self._eof_waiter = None # type: Optional[asyncio.Future[bool]]
|
self._eof_waiter = None # type: Optional[asyncio.Future[None]]
|
||||||
self._exception = None # type: Optional[BaseException]
|
self._exception = None # type: Optional[BaseException]
|
||||||
self._timer = timer
|
self._timer = timer
|
||||||
self._eof_callbacks = [] # type: List[Callable[[], None]]
|
self._eof_callbacks = [] # type: List[Callable[[], None]]
|
||||||
@ -156,8 +155,8 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
|
|
||||||
waiter = self._eof_waiter
|
waiter = self._eof_waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
set_exception(waiter, exc)
|
|
||||||
self._eof_waiter = None
|
self._eof_waiter = None
|
||||||
|
set_exception(waiter, exc)
|
||||||
|
|
||||||
def on_eof(self, callback: Callable[[], None]) -> None:
|
def on_eof(self, callback: Callable[[], None]) -> None:
|
||||||
if self._eof:
|
if self._eof:
|
||||||
@ -174,12 +173,12 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
set_result(waiter, True)
|
set_result(waiter, None)
|
||||||
|
|
||||||
waiter = self._eof_waiter
|
waiter = self._eof_waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._eof_waiter = None
|
self._eof_waiter = None
|
||||||
set_result(waiter, True)
|
set_result(waiter, None)
|
||||||
|
|
||||||
for cb in self._eof_callbacks:
|
for cb in self._eof_callbacks:
|
||||||
try:
|
try:
|
||||||
@ -240,7 +239,7 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
set_result(waiter, False)
|
set_result(waiter, None)
|
||||||
|
|
||||||
if (self._size > self._high_water and
|
if (self._size > self._high_water and
|
||||||
not self._protocol._reading_paused):
|
not self._protocol._reading_paused):
|
||||||
@ -279,7 +278,7 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
set_result(waiter, False)
|
set_result(waiter, None)
|
||||||
|
|
||||||
async def _wait(self, func_name: str) -> None:
|
async def _wait(self, func_name: str) -> None:
|
||||||
# StreamReader uses a future to link the protocol feed_data() method
|
# StreamReader uses a future to link the protocol feed_data() method
|
||||||
@ -388,31 +387,29 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
of the data corresponds to the end of a HTTP chunk , otherwise it is
|
of the data corresponds to the end of a HTTP chunk , otherwise it is
|
||||||
always False.
|
always False.
|
||||||
"""
|
"""
|
||||||
if self._exception is not None:
|
while True:
|
||||||
raise self._exception
|
if self._exception is not None:
|
||||||
|
raise self._exception
|
||||||
|
|
||||||
if not self._buffer and not self._eof:
|
|
||||||
if (self._http_chunk_splits and
|
|
||||||
self._cursor == self._http_chunk_splits[0]):
|
|
||||||
# end of http chunk without available data
|
|
||||||
self._http_chunk_splits = self._http_chunk_splits[1:]
|
|
||||||
return (b"", True)
|
|
||||||
await self._wait('readchunk')
|
|
||||||
|
|
||||||
if not self._buffer and not self._http_chunk_splits:
|
|
||||||
# end of file
|
|
||||||
return (b"", False)
|
|
||||||
elif self._http_chunk_splits is not None:
|
|
||||||
while self._http_chunk_splits:
|
while self._http_chunk_splits:
|
||||||
pos = self._http_chunk_splits[0]
|
pos = self._http_chunk_splits.pop(0)
|
||||||
self._http_chunk_splits = self._http_chunk_splits[1:]
|
|
||||||
if pos == self._cursor:
|
if pos == self._cursor:
|
||||||
return (b"", True)
|
return (b"", True)
|
||||||
if pos > self._cursor:
|
if pos > self._cursor:
|
||||||
return (self._read_nowait(pos-self._cursor), True)
|
return (self._read_nowait(pos-self._cursor), True)
|
||||||
return (self._read_nowait(-1), False)
|
internal_logger.warning('Skipping HTTP chunk end due to data '
|
||||||
else:
|
'consumption beyond chunk boundary')
|
||||||
return (self._read_nowait_chunk(-1), False)
|
|
||||||
|
if self._buffer:
|
||||||
|
return (self._read_nowait_chunk(-1), False)
|
||||||
|
# return (self._read_nowait(-1), False)
|
||||||
|
|
||||||
|
if self._eof:
|
||||||
|
# Special case for signifying EOF.
|
||||||
|
# (b'', True) is not a final return value actually.
|
||||||
|
return (b'', False)
|
||||||
|
|
||||||
|
await self._wait('readchunk')
|
||||||
|
|
||||||
async def readexactly(self, n: int) -> bytes:
|
async def readexactly(self, n: int) -> bytes:
|
||||||
if self._exception is not None:
|
if self._exception is not None:
|
||||||
@ -423,7 +420,7 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
block = await self.read(n)
|
block = await self.read(n)
|
||||||
if not block:
|
if not block:
|
||||||
partial = b''.join(blocks)
|
partial = b''.join(blocks)
|
||||||
raise asyncio.streams.IncompleteReadError(
|
raise asyncio.IncompleteReadError(
|
||||||
partial, len(partial) + n)
|
partial, len(partial) + n)
|
||||||
blocks.append(block)
|
blocks.append(block)
|
||||||
n -= len(block)
|
n -= len(block)
|
||||||
@ -462,11 +459,17 @@ class StreamReader(AsyncStreamReaderMixin):
|
|||||||
self._size -= len(data)
|
self._size -= len(data)
|
||||||
self._cursor += len(data)
|
self._cursor += len(data)
|
||||||
|
|
||||||
|
chunk_splits = self._http_chunk_splits
|
||||||
|
# Prevent memory leak: drop useless chunk splits
|
||||||
|
while chunk_splits and chunk_splits[0] < self._cursor:
|
||||||
|
chunk_splits.pop(0)
|
||||||
|
|
||||||
if self._size < self._low_water and self._protocol._reading_paused:
|
if self._size < self._low_water and self._protocol._reading_paused:
|
||||||
self._protocol.resume_reading()
|
self._protocol.resume_reading()
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def _read_nowait(self, n: int) -> bytes:
|
def _read_nowait(self, n: int) -> bytes:
|
||||||
|
""" Read not more than n bytes, or whole buffer is n == -1 """
|
||||||
chunks = []
|
chunks = []
|
||||||
|
|
||||||
while self._buffer:
|
while self._buffer:
|
||||||
@ -522,7 +525,7 @@ class EmptyStreamReader(AsyncStreamReaderMixin):
|
|||||||
return (b'', True)
|
return (b'', True)
|
||||||
|
|
||||||
async def readexactly(self, n: int) -> bytes:
|
async def readexactly(self, n: int) -> bytes:
|
||||||
raise asyncio.streams.IncompleteReadError(b'', n)
|
raise asyncio.IncompleteReadError(b'', n)
|
||||||
|
|
||||||
def read_nowait(self) -> bytes:
|
def read_nowait(self) -> bytes:
|
||||||
return b''
|
return b''
|
||||||
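Sketch of how a caller consumes the rewritten readchunk(): (b'', True) now marks an HTTP chunk boundary with no buffered data, and (b'', False) marks end of stream. The URL and helper name below are illustrative only.

import aiohttp


async def count_chunked_bytes(url: str) -> int:
    total = 0
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            while True:
                data, end_of_chunk = await resp.content.readchunk()
                if not data and not end_of_chunk:
                    break  # end of stream
                total += len(data)
    return total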
@ -537,7 +540,7 @@ class DataQueue(Generic[_T]):
|
|||||||
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
||||||
self._loop = loop
|
self._loop = loop
|
||||||
self._eof = False
|
self._eof = False
|
||||||
self._waiter = None # type: Optional[asyncio.Future[bool]]
|
self._waiter = None # type: Optional[asyncio.Future[None]]
|
||||||
self._exception = None # type: Optional[BaseException]
|
self._exception = None # type: Optional[BaseException]
|
||||||
self._size = 0
|
self._size = 0
|
||||||
self._buffer = collections.deque() # type: Deque[Tuple[_T, int]]
|
self._buffer = collections.deque() # type: Deque[Tuple[_T, int]]
|
||||||
@ -560,8 +563,8 @@ class DataQueue(Generic[_T]):
|
|||||||
|
|
||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
set_exception(waiter, exc)
|
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
|
set_exception(waiter, exc)
|
||||||
|
|
||||||
def feed_data(self, data: _T, size: int=0) -> None:
|
def feed_data(self, data: _T, size: int=0) -> None:
|
||||||
self._size += size
|
self._size += size
|
||||||
@ -570,7 +573,7 @@ class DataQueue(Generic[_T]):
|
|||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
set_result(waiter, True)
|
set_result(waiter, None)
|
||||||
|
|
||||||
def feed_eof(self) -> None:
|
def feed_eof(self) -> None:
|
||||||
self._eof = True
|
self._eof = True
|
||||||
@ -578,7 +581,7 @@ class DataQueue(Generic[_T]):
|
|||||||
waiter = self._waiter
|
waiter = self._waiter
|
||||||
if waiter is not None:
|
if waiter is not None:
|
||||||
self._waiter = None
|
self._waiter = None
|
||||||
set_result(waiter, False)
|
set_result(waiter, None)
|
||||||
|
|
||||||
async def read(self) -> _T:
|
async def read(self) -> _T:
|
||||||
if not self._buffer and not self._eof:
|
if not self._buffer and not self._eof:
|
||||||
|
@ -4,6 +4,7 @@ import asyncio
|
|||||||
import contextlib
|
import contextlib
|
||||||
import functools
|
import functools
|
||||||
import gc
|
import gc
|
||||||
|
import inspect
|
||||||
import socket
|
import socket
|
||||||
import sys
|
import sys
|
||||||
import unittest
|
import unittest
|
||||||
@ -74,6 +75,8 @@ def unused_port() -> int:
|
|||||||
|
|
||||||
|
|
||||||
class BaseTestServer(ABC):
|
class BaseTestServer(ABC):
|
||||||
|
__test__ = False
|
||||||
|
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
*,
|
*,
|
||||||
scheme: Union[str, object]=sentinel,
|
scheme: Union[str, object]=sentinel,
|
||||||
@ -230,6 +233,7 @@ class TestClient:
|
|||||||
To write functional tests for aiohttp based servers.
|
To write functional tests for aiohttp based servers.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
__test__ = False
|
||||||
|
|
||||||
def __init__(self, server: BaseTestServer, *,
|
def __init__(self, server: BaseTestServer, *,
|
||||||
cookie_jar: Optional[AbstractCookieJar]=None,
|
cookie_jar: Optional[AbstractCookieJar]=None,
|
||||||
@@ -282,15 +286,8 @@ class TestClient:
     def make_url(self, path: str) -> URL:
         return self._server.make_url(path)

-    async def request(self, method: str, path: str,
-                      **kwargs: Any) -> ClientResponse:
-        """Routes a request to tested http server.
-
-        The interface is identical to aiohttp.ClientSession.request,
-        except the loop kwarg is overridden by the instance used by the
-        test server.
-
-        """
+    async def _request(self, method: str, path: str,
+                       **kwargs: Any) -> ClientResponse:
         resp = await self._session.request(
             method, self.make_url(path), **kwargs
         )
@@ -298,46 +295,59 @@ class TestClient:
         self._responses.append(resp)
         return resp

+    def request(self, method: str, path: str,
+                **kwargs: Any) -> _RequestContextManager:
+        """Routes a request to tested http server.
+
+        The interface is identical to aiohttp.ClientSession.request,
+        except the loop kwarg is overridden by the instance used by the
+        test server.
+
+        """
+        return _RequestContextManager(
+            self._request(method, path, **kwargs)
+        )
+
     def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP GET request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_GET, path, **kwargs)
+            self._request(hdrs.METH_GET, path, **kwargs)
         )

     def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP POST request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_POST, path, **kwargs)
+            self._request(hdrs.METH_POST, path, **kwargs)
         )

     def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP OPTIONS request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_OPTIONS, path, **kwargs)
+            self._request(hdrs.METH_OPTIONS, path, **kwargs)
         )

     def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP HEAD request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_HEAD, path, **kwargs)
+            self._request(hdrs.METH_HEAD, path, **kwargs)
         )

     def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP PUT request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_PUT, path, **kwargs)
+            self._request(hdrs.METH_PUT, path, **kwargs)
         )

     def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP PATCH request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_PATCH, path, **kwargs)
+            self._request(hdrs.METH_PATCH, path, **kwargs)
         )

     def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
         """Perform an HTTP PATCH request."""
         return _RequestContextManager(
-            self.request(hdrs.METH_DELETE, path, **kwargs)
+            self._request(hdrs.METH_DELETE, path, **kwargs)
         )

     def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
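Usage sketch for the refactor above: request() now returns a _RequestContextManager wrapping the internal _request() coroutine, so it supports async with exactly like the get()/post() helpers; the handler and test below are illustrative.

from aiohttp import web


async def test_index(aiohttp_client):
    async def handler(request):
        return web.Response(text='ok')

    app = web.Application()
    app.router.add_get('/', handler)
    client = await aiohttp_client(app)

    async with client.request('GET', '/') as resp:
        assert resp.status == 200
        assert await resp.text() == 'ok'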
@@ -650,10 +660,11 @@ def make_mocked_request(method: str, path: str,
 def make_mocked_coro(return_value: Any=sentinel,
                      raise_exception: Any=sentinel) -> Any:
     """Creates a coroutine mock."""
-    @asyncio.coroutine
-    def mock_coro(*args: Any, **kwargs: Any) -> Any:
+    async def mock_coro(*args: Any, **kwargs: Any) -> Any:
         if raise_exception is not sentinel:
             raise raise_exception
-        return return_value
+        if not inspect.isawaitable(return_value):
+            return return_value
+        await return_value

     return mock.Mock(wraps=mock_coro)
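Quick example of the helper after this change (illustrative): the returned mock is awaitable and records calls, and an awaitable return_value is now awaited instead of handed back as-is.

from aiohttp.test_utils import make_mocked_coro


async def demo() -> None:
    mocked = make_mocked_coro(return_value=42)
    assert await mocked('a', key=1) == 42
    mocked.assert_called_once_with('a', key=1)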
|
@ -1,5 +1,5 @@
|
|||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
from typing import TYPE_CHECKING, Awaitable, Callable, Type
|
from typing import TYPE_CHECKING, Awaitable, Callable, Type, Union
|
||||||
|
|
||||||
import attr
|
import attr
|
||||||
from multidict import CIMultiDict # noqa
|
from multidict import CIMultiDict # noqa
|
||||||
@ -11,7 +11,25 @@ from .signals import Signal
|
|||||||
if TYPE_CHECKING: # pragma: no cover
|
if TYPE_CHECKING: # pragma: no cover
|
||||||
from .client import ClientSession # noqa
|
from .client import ClientSession # noqa
|
||||||
|
|
||||||
_Signal = Signal[Callable[['TraceConfig'], Awaitable[None]]]
|
_SignalArgs = Union[
|
||||||
|
'TraceRequestStartParams',
|
||||||
|
'TraceRequestEndParams',
|
||||||
|
'TraceRequestExceptionParams',
|
||||||
|
'TraceConnectionQueuedStartParams',
|
||||||
|
'TraceConnectionQueuedEndParams',
|
||||||
|
'TraceConnectionCreateStartParams',
|
||||||
|
'TraceConnectionCreateEndParams',
|
||||||
|
'TraceConnectionReuseconnParams',
|
||||||
|
'TraceDnsResolveHostStartParams',
|
||||||
|
'TraceDnsResolveHostEndParams',
|
||||||
|
'TraceDnsCacheHitParams',
|
||||||
|
'TraceDnsCacheMissParams',
|
||||||
|
'TraceRequestRedirectParams',
|
||||||
|
'TraceRequestChunkSentParams',
|
||||||
|
'TraceResponseChunkReceivedParams',
|
||||||
|
]
|
||||||
|
_Signal = Signal[Callable[[ClientSession, SimpleNamespace, _SignalArgs],
|
||||||
|
Awaitable[None]]]
|
||||||
else:
|
else:
|
||||||
_Signal = Signal
|
_Signal = Signal
|
||||||
|
|
||||||
|
@ -12,7 +12,13 @@ from typing import (
|
|||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
|
|
||||||
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
from multidict import (
|
||||||
|
CIMultiDict,
|
||||||
|
CIMultiDictProxy,
|
||||||
|
MultiDict,
|
||||||
|
MultiDictProxy,
|
||||||
|
istr,
|
||||||
|
)
|
||||||
from yarl import URL
|
from yarl import URL
|
||||||
|
|
||||||
DEFAULT_JSON_ENCODER = json.dumps
|
DEFAULT_JSON_ENCODER = json.dumps
|
||||||
@ -33,7 +39,8 @@ else:
|
|||||||
Byteish = Union[bytes, bytearray, memoryview]
|
Byteish = Union[bytes, bytearray, memoryview]
|
||||||
JSONEncoder = Callable[[Any], str]
|
JSONEncoder = Callable[[Any], str]
|
||||||
JSONDecoder = Callable[[str], Any]
|
JSONDecoder = Callable[[str], Any]
|
||||||
LooseHeaders = Union[Mapping[str, str], _CIMultiDict, _CIMultiDictProxy]
|
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict,
|
||||||
|
_CIMultiDictProxy]
|
||||||
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
|
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
|
||||||
StrOrURL = Union[str, URL]
|
StrOrURL = Union[str, URL]
|
||||||
LooseCookies = Union[Iterable[Tuple[str, 'BaseCookie[str]']],
|
LooseCookies = Union[Iterable[Tuple[str, 'BaseCookie[str]']],
|
||||||
|
@ -10,121 +10,138 @@ from typing import Any, Awaitable, Callable, List, Optional, Type, Union, cast
|
|||||||
from .abc import AbstractAccessLogger
|
from .abc import AbstractAccessLogger
|
||||||
from .helpers import all_tasks
|
from .helpers import all_tasks
|
||||||
from .log import access_logger
|
from .log import access_logger
|
||||||
from .web_app import Application, CleanupError
|
from .web_app import Application as Application
|
||||||
|
from .web_app import CleanupError as CleanupError
|
||||||
|
from .web_exceptions import HTTPAccepted as HTTPAccepted
|
||||||
|
from .web_exceptions import HTTPBadGateway as HTTPBadGateway
|
||||||
|
from .web_exceptions import HTTPBadRequest as HTTPBadRequest
|
||||||
|
from .web_exceptions import HTTPClientError as HTTPClientError
|
||||||
|
from .web_exceptions import HTTPConflict as HTTPConflict
|
||||||
|
from .web_exceptions import HTTPCreated as HTTPCreated
|
||||||
|
from .web_exceptions import HTTPError as HTTPError
|
||||||
|
from .web_exceptions import HTTPException as HTTPException
|
||||||
|
from .web_exceptions import HTTPExpectationFailed as HTTPExpectationFailed
|
||||||
|
from .web_exceptions import HTTPFailedDependency as HTTPFailedDependency
|
||||||
|
from .web_exceptions import HTTPForbidden as HTTPForbidden
|
||||||
|
from .web_exceptions import HTTPFound as HTTPFound
|
||||||
|
from .web_exceptions import HTTPGatewayTimeout as HTTPGatewayTimeout
|
||||||
|
from .web_exceptions import HTTPGone as HTTPGone
|
||||||
|
from .web_exceptions import HTTPInsufficientStorage as HTTPInsufficientStorage
|
||||||
|
from .web_exceptions import HTTPInternalServerError as HTTPInternalServerError
|
||||||
|
from .web_exceptions import HTTPLengthRequired as HTTPLengthRequired
|
||||||
|
from .web_exceptions import HTTPMethodNotAllowed as HTTPMethodNotAllowed
|
||||||
|
from .web_exceptions import HTTPMisdirectedRequest as HTTPMisdirectedRequest
|
||||||
|
from .web_exceptions import HTTPMovedPermanently as HTTPMovedPermanently
|
||||||
|
from .web_exceptions import HTTPMultipleChoices as HTTPMultipleChoices
|
||||||
from .web_exceptions import (
|
from .web_exceptions import (
|
||||||
HTTPAccepted,
|
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
||||||
HTTPBadGateway,
|
|
||||||
HTTPBadRequest,
|
|
||||||
HTTPClientError,
|
|
||||||
HTTPConflict,
|
|
||||||
HTTPCreated,
|
|
||||||
HTTPError,
|
|
||||||
HTTPException,
|
|
||||||
HTTPExpectationFailed,
|
|
||||||
HTTPFailedDependency,
|
|
||||||
HTTPForbidden,
|
|
||||||
HTTPFound,
|
|
||||||
HTTPGatewayTimeout,
|
|
||||||
HTTPGone,
|
|
||||||
HTTPInsufficientStorage,
|
|
||||||
HTTPInternalServerError,
|
|
||||||
HTTPLengthRequired,
|
|
||||||
HTTPMethodNotAllowed,
|
|
||||||
HTTPMisdirectedRequest,
|
|
||||||
HTTPMovedPermanently,
|
|
||||||
HTTPMultipleChoices,
|
|
||||||
HTTPNetworkAuthenticationRequired,
|
|
||||||
HTTPNoContent,
|
|
||||||
HTTPNonAuthoritativeInformation,
|
|
||||||
HTTPNotAcceptable,
|
|
||||||
HTTPNotExtended,
|
|
||||||
HTTPNotFound,
|
|
||||||
HTTPNotImplemented,
|
|
||||||
HTTPNotModified,
|
|
||||||
HTTPOk,
|
|
||||||
HTTPPartialContent,
|
|
||||||
HTTPPaymentRequired,
|
|
||||||
HTTPPermanentRedirect,
|
|
||||||
HTTPPreconditionFailed,
|
|
||||||
HTTPPreconditionRequired,
|
|
||||||
HTTPProxyAuthenticationRequired,
|
|
||||||
HTTPRedirection,
|
|
||||||
HTTPRequestEntityTooLarge,
|
|
||||||
HTTPRequestHeaderFieldsTooLarge,
|
|
||||||
HTTPRequestRangeNotSatisfiable,
|
|
||||||
HTTPRequestTimeout,
|
|
||||||
HTTPRequestURITooLong,
|
|
||||||
HTTPResetContent,
|
|
||||||
HTTPSeeOther,
|
|
||||||
HTTPServerError,
|
|
||||||
HTTPServiceUnavailable,
|
|
||||||
HTTPSuccessful,
|
|
||||||
HTTPTemporaryRedirect,
|
|
||||||
HTTPTooManyRequests,
|
|
||||||
HTTPUnauthorized,
|
|
||||||
HTTPUnavailableForLegalReasons,
|
|
||||||
HTTPUnprocessableEntity,
|
|
||||||
HTTPUnsupportedMediaType,
|
|
||||||
HTTPUpgradeRequired,
|
|
||||||
HTTPUseProxy,
|
|
||||||
HTTPVariantAlsoNegotiates,
|
|
||||||
HTTPVersionNotSupported,
|
|
||||||
)
|
)
|
||||||
from .web_fileresponse import FileResponse
|
from .web_exceptions import HTTPNoContent as HTTPNoContent
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPNotAcceptable as HTTPNotAcceptable
|
||||||
|
from .web_exceptions import HTTPNotExtended as HTTPNotExtended
|
||||||
|
from .web_exceptions import HTTPNotFound as HTTPNotFound
|
||||||
|
from .web_exceptions import HTTPNotImplemented as HTTPNotImplemented
|
||||||
|
from .web_exceptions import HTTPNotModified as HTTPNotModified
|
||||||
|
from .web_exceptions import HTTPOk as HTTPOk
|
||||||
|
from .web_exceptions import HTTPPartialContent as HTTPPartialContent
|
||||||
|
from .web_exceptions import HTTPPaymentRequired as HTTPPaymentRequired
|
||||||
|
from .web_exceptions import HTTPPermanentRedirect as HTTPPermanentRedirect
|
||||||
|
from .web_exceptions import HTTPPreconditionFailed as HTTPPreconditionFailed
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
||||||
|
)
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPRedirection as HTTPRedirection
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
||||||
|
)
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
||||||
|
)
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPRequestTimeout as HTTPRequestTimeout
|
||||||
|
from .web_exceptions import HTTPRequestURITooLong as HTTPRequestURITooLong
|
||||||
|
from .web_exceptions import HTTPResetContent as HTTPResetContent
|
||||||
|
from .web_exceptions import HTTPSeeOther as HTTPSeeOther
|
||||||
|
from .web_exceptions import HTTPServerError as HTTPServerError
|
||||||
|
from .web_exceptions import HTTPServiceUnavailable as HTTPServiceUnavailable
|
||||||
|
from .web_exceptions import HTTPSuccessful as HTTPSuccessful
|
||||||
|
from .web_exceptions import HTTPTemporaryRedirect as HTTPTemporaryRedirect
|
||||||
|
from .web_exceptions import HTTPTooManyRequests as HTTPTooManyRequests
|
||||||
|
from .web_exceptions import HTTPUnauthorized as HTTPUnauthorized
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPUnprocessableEntity as HTTPUnprocessableEntity
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPUpgradeRequired as HTTPUpgradeRequired
|
||||||
|
from .web_exceptions import HTTPUseProxy as HTTPUseProxy
|
||||||
|
from .web_exceptions import (
|
||||||
|
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
||||||
|
)
|
||||||
|
from .web_exceptions import HTTPVersionNotSupported as HTTPVersionNotSupported
|
||||||
|
from .web_fileresponse import FileResponse as FileResponse
|
||||||
from .web_log import AccessLogger
|
from .web_log import AccessLogger
|
||||||
from .web_middlewares import middleware, normalize_path_middleware
|
from .web_middlewares import middleware as middleware
|
||||||
from .web_protocol import (
|
from .web_middlewares import (
|
||||||
PayloadAccessError,
|
normalize_path_middleware as normalize_path_middleware,
|
||||||
RequestHandler,
|
|
||||||
RequestPayloadError,
|
|
||||||
)
|
)
|
||||||
from .web_request import BaseRequest, FileField, Request
|
from .web_protocol import PayloadAccessError as PayloadAccessError
|
||||||
from .web_response import (
|
from .web_protocol import RequestHandler as RequestHandler
|
||||||
ContentCoding,
|
from .web_protocol import RequestPayloadError as RequestPayloadError
|
||||||
Response,
|
from .web_request import BaseRequest as BaseRequest
|
||||||
StreamResponse,
|
from .web_request import FileField as FileField
|
||||||
json_response,
|
from .web_request import Request as Request
|
||||||
)
|
from .web_response import ContentCoding as ContentCoding
|
||||||
from .web_routedef import (
|
from .web_response import Response as Response
|
||||||
AbstractRouteDef,
|
from .web_response import StreamResponse as StreamResponse
|
||||||
RouteDef,
|
from .web_response import json_response as json_response
|
||||||
RouteTableDef,
|
from .web_routedef import AbstractRouteDef as AbstractRouteDef
|
||||||
StaticDef,
|
from .web_routedef import RouteDef as RouteDef
|
||||||
delete,
|
from .web_routedef import RouteTableDef as RouteTableDef
|
||||||
get,
|
from .web_routedef import StaticDef as StaticDef
|
||||||
head,
|
from .web_routedef import delete as delete
|
||||||
options,
|
from .web_routedef import get as get
|
||||||
patch,
|
from .web_routedef import head as head
|
||||||
post,
|
from .web_routedef import options as options
|
||||||
put,
|
from .web_routedef import patch as patch
|
||||||
route,
|
from .web_routedef import post as post
|
||||||
static,
|
from .web_routedef import put as put
|
||||||
view,
|
from .web_routedef import route as route
|
||||||
)
|
from .web_routedef import static as static
|
||||||
from .web_runner import (
|
from .web_routedef import view as view
|
||||||
AppRunner,
|
from .web_runner import AppRunner as AppRunner
|
||||||
BaseRunner,
|
from .web_runner import BaseRunner as BaseRunner
|
||||||
BaseSite,
|
from .web_runner import BaseSite as BaseSite
|
||||||
GracefulExit,
|
from .web_runner import GracefulExit as GracefulExit
|
||||||
ServerRunner,
|
from .web_runner import NamedPipeSite as NamedPipeSite
|
||||||
SockSite,
|
from .web_runner import ServerRunner as ServerRunner
|
||||||
TCPSite,
|
from .web_runner import SockSite as SockSite
|
||||||
UnixSite,
|
from .web_runner import TCPSite as TCPSite
|
||||||
)
|
from .web_runner import UnixSite as UnixSite
|
||||||
from .web_server import Server
|
from .web_server import Server as Server
|
||||||
from .web_urldispatcher import (
|
from .web_urldispatcher import AbstractResource as AbstractResource
|
||||||
AbstractResource,
|
from .web_urldispatcher import AbstractRoute as AbstractRoute
|
||||||
AbstractRoute,
|
from .web_urldispatcher import DynamicResource as DynamicResource
|
||||||
DynamicResource,
|
from .web_urldispatcher import PlainResource as PlainResource
|
||||||
PlainResource,
|
from .web_urldispatcher import Resource as Resource
|
||||||
Resource,
|
from .web_urldispatcher import ResourceRoute as ResourceRoute
|
||||||
ResourceRoute,
|
from .web_urldispatcher import StaticResource as StaticResource
|
||||||
StaticResource,
|
from .web_urldispatcher import UrlDispatcher as UrlDispatcher
|
||||||
UrlDispatcher,
|
from .web_urldispatcher import UrlMappingMatchInfo as UrlMappingMatchInfo
|
||||||
UrlMappingMatchInfo,
|
from .web_urldispatcher import View as View
|
||||||
View,
|
from .web_ws import WebSocketReady as WebSocketReady
|
||||||
)
|
from .web_ws import WebSocketResponse as WebSocketResponse
|
||||||
from .web_ws import WebSocketReady, WebSocketResponse, WSMsgType
|
from .web_ws import WSMsgType as WSMsgType
|
||||||
|
|
||||||
__all__ = (
|
__all__ = (
|
||||||
# web_app
|
# web_app
|
||||||
@ -230,6 +247,7 @@ __all__ = (
|
|||||||
'SockSite',
|
'SockSite',
|
||||||
'TCPSite',
|
'TCPSite',
|
||||||
'UnixSite',
|
'UnixSite',
|
||||||
|
'NamedPipeSite',
|
||||||
# web_server
|
# web_server
|
||||||
'Server',
|
'Server',
|
||||||
# web_urldispatcher
|
# web_urldispatcher
|
||||||
|
@@ -88,7 +88,7 @@ class Application(MutableMapping[str, Any]):
     def __init__(self, *,
                  logger: logging.Logger=web_logger,
                  router: Optional[UrlDispatcher]=None,
-                 middlewares: Sequence[_Middleware]=(),
+                 middlewares: Iterable[_Middleware]=(),
                  handler_args: Mapping[str, Any]=None,
                  client_max_size: int=1024**2,
                  loop: Optional[asyncio.AbstractEventLoop]=None,
@@ -466,6 +466,9 @@ class Application(MutableMapping[str, Any]):
     def __repr__(self) -> str:
         return "<Application 0x{:x}>".format(id(self))

+    def __bool__(self) -> bool:
+        return True
+

 class CleanupError(RuntimeError):
     @property
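Why the explicit __bool__ matters (illustrative): Application is a MutableMapping, so without it an application holding no keys would be falsy like an empty dict.

from aiohttp import web

app = web.Application()
assert len(app) == 0       # nothing stored in the app-as-mapping yet
assert bool(app) is True   # but the application object itself stays truthy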
|
@ -1,6 +1,8 @@
|
|||||||
import warnings
|
import warnings
|
||||||
from typing import Any, Dict, Iterable, List, Optional, Set # noqa
|
from typing import Any, Dict, Iterable, List, Optional, Set # noqa
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from .typedefs import LooseHeaders, StrOrURL
|
from .typedefs import LooseHeaders, StrOrURL
|
||||||
from .web_response import Response
|
from .web_response import Response
|
||||||
|
|
||||||
@ -161,7 +163,7 @@ class _HTTPMove(HTTPRedirection):
|
|||||||
raise ValueError("HTTP redirects need a location to redirect to.")
|
raise ValueError("HTTP redirects need a location to redirect to.")
|
||||||
super().__init__(headers=headers, reason=reason,
|
super().__init__(headers=headers, reason=reason,
|
||||||
body=body, text=text, content_type=content_type)
|
body=body, text=text, content_type=content_type)
|
||||||
self.headers['Location'] = str(location)
|
self.headers['Location'] = str(URL(location))
|
||||||
self.location = location
|
self.location = location
|
||||||
|
|
||||||
|
|
||||||
|
@ -24,7 +24,6 @@ from .log import server_logger
|
|||||||
from .typedefs import LooseHeaders
|
from .typedefs import LooseHeaders
|
||||||
from .web_exceptions import (
|
from .web_exceptions import (
|
||||||
HTTPNotModified,
|
HTTPNotModified,
|
||||||
HTTPOk,
|
|
||||||
HTTPPartialContent,
|
HTTPPartialContent,
|
||||||
HTTPPreconditionFailed,
|
HTTPPreconditionFailed,
|
||||||
HTTPRequestRangeNotSatisfiable,
|
HTTPRequestRangeNotSatisfiable,
|
||||||
@ -245,7 +244,7 @@ class FileResponse(StreamResponse):
|
|||||||
encoding = 'gzip' if gzip else None
|
encoding = 'gzip' if gzip else None
|
||||||
should_set_ct = False
|
should_set_ct = False
|
||||||
|
|
||||||
status = HTTPOk.status_code
|
status = self._status
|
||||||
file_size = st.st_size
|
file_size = st.st_size
|
||||||
count = file_size
|
count = file_size
|
||||||
|
|
||||||
@ -318,8 +317,8 @@ class FileResponse(StreamResponse):
|
|||||||
status = HTTPPartialContent.status_code
|
status = HTTPPartialContent.status_code
|
||||||
# Even though you are sending the whole file, you should still
|
# Even though you are sending the whole file, you should still
|
||||||
# return a HTTP 206 for a Range request.
|
# return a HTTP 206 for a Range request.
|
||||||
|
self.set_status(status)
|
||||||
|
|
||||||
self.set_status(status)
|
|
||||||
if should_set_ct:
|
if should_set_ct:
|
||||||
self.content_type = ct # type: ignore
|
self.content_type = ct # type: ignore
|
||||||
if encoding:
|
if encoding:
|
||||||
@@ -337,8 +336,11 @@ class FileResponse(StreamResponse):
             self.headers[hdrs.CONTENT_RANGE] = 'bytes {0}-{1}/{2}'.format(
                 real_start, real_start + count - 1, file_size)

-        with (await loop.run_in_executor(None, filepath.open, 'rb')) as fobj:
-            if start:  # be aware that start could be None or int=0 here.
-                await loop.run_in_executor(None, fobj.seek, start)
+        fobj = await loop.run_in_executor(None, filepath.open, 'rb')
+        if start:  # be aware that start could be None or int=0 here.
+            await loop.run_in_executor(None, fobj.seek, start)

-            return await self._sendfile(request, fobj, count)
+        try:
+            return await self._sendfile(request, fobj, count)
+        finally:
+            await loop.run_in_executor(None, fobj.close)
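Generic sketch of the pattern adopted above (not aiohttp API): open and close the file through the default executor so the event loop never blocks on filesystem calls, and close the handle even if sending fails; send() is a placeholder callable.

import asyncio


async def send_file(path: str, send) -> None:
    loop = asyncio.get_event_loop()
    fobj = await loop.run_in_executor(None, open, path, 'rb')
    try:
        await send(fobj)
    finally:
        await loop.run_in_executor(None, fobj.close)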
|
@ -4,7 +4,7 @@ import logging
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
from typing import Callable, Dict, Iterable, List, Tuple # noqa
|
from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
|
||||||
|
|
||||||
from .abc import AbstractAccessLogger
|
from .abc import AbstractAccessLogger
|
||||||
from .web_request import BaseRequest
|
from .web_request import BaseRequest
|
||||||
@ -226,9 +226,9 @@ class AccessLogger(AbstractAccessLogger):
|
|||||||
extra[key] = value
|
extra[key] = value
|
||||||
else:
|
else:
|
||||||
k1, k2 = key
|
k1, k2 = key
|
||||||
dct = extra.get(k1, {})
|
dct = extra.get(k1, {}) # type: Any
|
||||||
dct[k2] = value # type: ignore
|
dct[k2] = value
|
||||||
extra[k1] = dct # type: ignore
|
extra[k1] = dct
|
||||||
|
|
||||||
self.logger.info(self._log_format % tuple(values), extra=extra)
|
self.logger.info(self._log_format % tuple(values), extra=extra)
|
||||||
except Exception:
|
except Exception:
|
||||||
|
@@ -1,7 +1,7 @@
 import re
 from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar

-from .web_exceptions import HTTPMovedPermanently, _HTTPMove
+from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
 from .web_request import Request
 from .web_response import StreamResponse
 from .web_urldispatcher import SystemRoute
@@ -42,7 +42,7 @@ _Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
 def normalize_path_middleware(
         *, append_slash: bool=True, remove_slash: bool=False,
         merge_slashes: bool=True,
-        redirect_class: Type[_HTTPMove]=HTTPMovedPermanently) -> _Middleware:
+        redirect_class: Type[_HTTPMove]=HTTPPermanentRedirect) -> _Middleware:
     """
     Middleware factory which produces a middleware that normalizes
     the path of a request. By normalizing it means:
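Usage sketch for the factory above; with the new default the redirect is issued as 308 Permanent Redirect, which preserves the request method (clients may rewrite a 301 to GET).

from aiohttp import web

app = web.Application(middlewares=[
    web.normalize_path_middleware(append_slash=True, merge_slashes=True),
])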
|
@ -107,7 +107,7 @@ class RequestHandler(BaseProtocol):
|
|||||||
"""
|
"""
|
||||||
KEEPALIVE_RESCHEDULE_DELAY = 1
|
KEEPALIVE_RESCHEDULE_DELAY = 1
|
||||||
|
|
||||||
__slots__ = ('_request_count', '_keep_alive', '_manager',
|
__slots__ = ('_request_count', '_keepalive', '_manager',
|
||||||
'_request_handler', '_request_factory', '_tcp_keepalive',
|
'_request_handler', '_request_factory', '_tcp_keepalive',
|
||||||
'_keepalive_time', '_keepalive_handle', '_keepalive_timeout',
|
'_keepalive_time', '_keepalive_handle', '_keepalive_timeout',
|
||||||
'_lingering_time', '_messages', '_message_tail',
|
'_lingering_time', '_messages', '_message_tail',
|
||||||
@ -411,14 +411,14 @@ class RequestHandler(BaseProtocol):
|
|||||||
request = self._request_factory(
|
request = self._request_factory(
|
||||||
message, payload, self, writer, handler)
|
message, payload, self, writer, handler)
|
||||||
try:
|
try:
|
||||||
|
# a new task is used for copy context vars (#3406)
|
||||||
|
task = self._loop.create_task(
|
||||||
|
self._request_handler(request))
|
||||||
try:
|
try:
|
||||||
# a new task is used for copy context vars (#3406)
|
|
||||||
task = self._loop.create_task(
|
|
||||||
self._request_handler(request))
|
|
||||||
resp = await task
|
resp = await task
|
||||||
except HTTPException as exc:
|
except HTTPException as exc:
|
||||||
resp = exc
|
resp = exc
|
||||||
except asyncio.CancelledError:
|
except (asyncio.CancelledError, ConnectionError):
|
||||||
self.log_debug('Ignored premature client disconnection')
|
self.log_debug('Ignored premature client disconnection')
|
||||||
break
|
break
|
||||||
except asyncio.TimeoutError as exc:
|
except asyncio.TimeoutError as exc:
|
||||||
@ -435,6 +435,9 @@ class RequestHandler(BaseProtocol):
|
|||||||
"please raise the exception instead",
|
"please raise the exception instead",
|
||||||
DeprecationWarning)
|
DeprecationWarning)
|
||||||
|
|
||||||
|
# Drop the processed task from asyncio.Task.all_tasks() early
|
||||||
|
del task
|
||||||
|
|
||||||
if self.debug:
|
if self.debug:
|
||||||
if not isinstance(resp, StreamResponse):
|
if not isinstance(resp, StreamResponse):
|
||||||
if resp is None:
|
if resp is None:
|
||||||
@ -444,8 +447,22 @@ class RequestHandler(BaseProtocol):
|
|||||||
raise RuntimeError("Web-handler should return "
|
raise RuntimeError("Web-handler should return "
|
||||||
"a response instance, "
|
"a response instance, "
|
||||||
"got {!r}".format(resp))
|
"got {!r}".format(resp))
|
||||||
await resp.prepare(request)
|
try:
|
||||||
await resp.write_eof()
|
prepare_meth = resp.prepare
|
||||||
|
except AttributeError:
|
||||||
|
if resp is None:
|
||||||
|
raise RuntimeError("Missing return "
|
||||||
|
"statement on request handler")
|
||||||
|
else:
|
||||||
|
raise RuntimeError("Web-handler should return "
|
||||||
|
"a response instance, "
|
||||||
|
"got {!r}".format(resp))
|
||||||
|
try:
|
||||||
|
await prepare_meth(request)
|
||||||
|
await resp.write_eof()
|
||||||
|
except ConnectionError:
|
||||||
|
self.log_debug('Ignored premature client disconnection 2')
|
||||||
|
break
|
||||||
|
|
||||||
# notify server about keep-alive
|
# notify server about keep-alive
|
||||||
self._keepalive = bool(resp.keep_alive)
|
self._keepalive = bool(resp.keep_alive)
|
||||||
|
@ -32,7 +32,7 @@ from . import hdrs
|
|||||||
from .abc import AbstractStreamWriter
|
from .abc import AbstractStreamWriter
|
||||||
from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel
|
from .helpers import DEBUG, ChainMapProxy, HeadersMixin, reify, sentinel
|
||||||
from .http_parser import RawRequestMessage
|
from .http_parser import RawRequestMessage
|
||||||
from .multipart import MultipartReader
|
from .multipart import BodyPartReader, MultipartReader
|
||||||
from .streams import EmptyStreamReader, StreamReader
|
from .streams import EmptyStreamReader, StreamReader
|
||||||
from .typedefs import (
|
from .typedefs import (
|
||||||
DEFAULT_JSON_DECODER,
|
DEFAULT_JSON_DECODER,
|
||||||
@ -611,41 +611,49 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
|
|||||||
field = await multipart.next()
|
field = await multipart.next()
|
||||||
while field is not None:
|
while field is not None:
|
||||||
size = 0
|
size = 0
|
||||||
content_type = field.headers.get(hdrs.CONTENT_TYPE)
|
field_ct = field.headers.get(hdrs.CONTENT_TYPE)
|
||||||
|
|
||||||
if field.filename:
|
if isinstance(field, BodyPartReader):
|
||||||
# store file in temp file
|
if field.filename and field_ct:
|
||||||
tmp = tempfile.TemporaryFile()
|
# store file in temp file
|
||||||
chunk = await field.read_chunk(size=2**16)
|
tmp = tempfile.TemporaryFile()
|
||||||
while chunk:
|
chunk = await field.read_chunk(size=2**16)
|
||||||
chunk = field.decode(chunk)
|
while chunk:
|
||||||
tmp.write(chunk)
|
chunk = field.decode(chunk)
|
||||||
size += len(chunk)
|
tmp.write(chunk)
|
||||||
|
size += len(chunk)
|
||||||
|
if 0 < max_size < size:
|
||||||
|
raise HTTPRequestEntityTooLarge(
|
||||||
|
max_size=max_size,
|
||||||
|
actual_size=size
|
||||||
|
)
|
||||||
|
chunk = await field.read_chunk(size=2**16)
|
||||||
|
tmp.seek(0)
|
||||||
|
|
||||||
|
ff = FileField(field.name, field.filename,
|
||||||
|
cast(io.BufferedReader, tmp),
|
||||||
|
field_ct, field.headers)
|
||||||
|
out.add(field.name, ff)
|
||||||
|
else:
|
||||||
|
# deal with ordinary data
|
||||||
|
value = await field.read(decode=True)
|
||||||
|
if field_ct is None or \
|
||||||
|
field_ct.startswith('text/'):
|
||||||
|
charset = field.get_charset(default='utf-8')
|
||||||
|
out.add(field.name, value.decode(charset))
|
||||||
|
else:
|
||||||
|
out.add(field.name, value)
|
||||||
|
size += len(value)
|
||||||
if 0 < max_size < size:
|
if 0 < max_size < size:
|
||||||
raise HTTPRequestEntityTooLarge(
|
raise HTTPRequestEntityTooLarge(
|
||||||
max_size=max_size,
|
max_size=max_size,
|
||||||
actual_size=size
|
actual_size=size
|
||||||
)
|
)
|
||||||
chunk = await field.read_chunk(size=2**16)
|
|
||||||
tmp.seek(0)
|
|
||||||
|
|
||||||
ff = FileField(field.name, field.filename,
|
|
||||||
cast(io.BufferedReader, tmp),
|
|
||||||
content_type, field.headers)
|
|
||||||
out.add(field.name, ff)
|
|
||||||
else:
|
else:
|
||||||
value = await field.read(decode=True)
|
raise ValueError(
|
||||||
if content_type is None or \
|
'To decode nested multipart you need '
|
||||||
content_type.startswith('text/'):
|
'to use custom reader',
|
||||||
charset = field.get_charset(default='utf-8')
|
)
|
||||||
value = value.decode(charset)
|
|
||||||
out.add(field.name, value)
|
|
||||||
size += len(value)
|
|
||||||
if 0 < max_size < size:
|
|
||||||
raise HTTPRequestEntityTooLarge(
|
|
||||||
max_size=max_size,
|
|
||||||
actual_size=size
|
|
||||||
)
|
|
||||||
|
|
||||||
field = await multipart.next()
|
field = await multipart.next()
|
||||||
else:
|
else:
|
||||||
@ -670,6 +678,9 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
|
|||||||
def __eq__(self, other: object) -> bool:
|
def __eq__(self, other: object) -> bool:
|
||||||
return id(self) == id(other)
|
return id(self) == id(other)
|
||||||
|
|
||||||
|
def __bool__(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
async def _prepare_hook(self, response: StreamResponse) -> None:
|
async def _prepare_hook(self, response: StreamResponse) -> None:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -81,7 +81,7 @@ class StreamResponse(BaseClass, HeadersMixin):
|
|||||||
if headers is not None:
|
if headers is not None:
|
||||||
self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
|
self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
|
||||||
else:
|
else:
|
||||||
self._headers = CIMultiDict() # type: CIMultiDict[str]
|
self._headers = CIMultiDict()
|
||||||
|
|
||||||
self.set_status(status, reason)
|
self.set_status(status, reason)
|
||||||
|
|
||||||
|
@ -10,6 +10,7 @@ from typing import (
|
|||||||
List,
|
List,
|
||||||
Optional,
|
Optional,
|
||||||
Sequence,
|
Sequence,
|
||||||
|
Type,
|
||||||
Union,
|
Union,
|
||||||
overload,
|
overload,
|
||||||
)
|
)
|
||||||
@ -40,7 +41,7 @@ class AbstractRouteDef(abc.ABC):
|
|||||||
|
|
||||||
|
|
||||||
_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
|
_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
|
||||||
_HandlerType = Union[AbstractView, _SimpleHandler]
|
_HandlerType = Union[Type[AbstractView], _SimpleHandler]
|
||||||
|
|
||||||
|
|
||||||
@attr.s(frozen=True, repr=False, slots=True)
|
@attr.s(frozen=True, repr=False, slots=True)
|
||||||
@ -120,7 +121,7 @@ def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
|
|||||||
return route(hdrs.METH_DELETE, path, handler, **kwargs)
|
return route(hdrs.METH_DELETE, path, handler, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def view(path: str, handler: AbstractView, **kwargs: Any) -> RouteDef:
|
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
|
||||||
return route(hdrs.METH_ANY, path, handler, **kwargs)
|
return route(hdrs.METH_ANY, path, handler, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
@ -15,8 +15,8 @@ except ImportError:
|
|||||||
SSLContext = object # type: ignore
|
SSLContext = object # type: ignore
|
||||||
|
|
||||||
|
|
||||||
__all__ = ('BaseSite', 'TCPSite', 'UnixSite', 'SockSite', 'BaseRunner',
|
__all__ = ('BaseSite', 'TCPSite', 'UnixSite', 'NamedPipeSite', 'SockSite',
|
||||||
'AppRunner', 'ServerRunner', 'GracefulExit')
|
'BaseRunner', 'AppRunner', 'ServerRunner', 'GracefulExit')
|
||||||
|
|
||||||
|
|
||||||
class GracefulExit(SystemExit):
|
class GracefulExit(SystemExit):
|
||||||
@ -58,7 +58,9 @@ class BaseSite(ABC):
|
|||||||
self._runner._unreg_site(self)
|
self._runner._unreg_site(self)
|
||||||
return # not started yet
|
return # not started yet
|
||||||
self._server.close()
|
self._server.close()
|
||||||
await self._server.wait_closed()
|
# named pipes do not have wait_closed property
|
||||||
|
if hasattr(self._server, 'wait_closed'):
|
||||||
|
await self._server.wait_closed()
|
||||||
await self._runner.shutdown()
|
await self._runner.shutdown()
|
||||||
assert self._runner.server
|
assert self._runner.server
|
||||||
await self._runner.server.shutdown(self._shutdown_timeout)
|
await self._runner.server.shutdown(self._shutdown_timeout)
|
||||||
@ -95,7 +97,7 @@ class TCPSite(BaseSite):
|
|||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
server = self._runner.server
|
server = self._runner.server
|
||||||
assert server is not None
|
assert server is not None
|
||||||
self._server = await loop.create_server(
|
self._server = await loop.create_server( # type: ignore
|
||||||
server, self._host, self._port,
|
server, self._host, self._port,
|
||||||
ssl=self._ssl_context, backlog=self._backlog,
|
ssl=self._ssl_context, backlog=self._backlog,
|
||||||
reuse_address=self._reuse_address,
|
reuse_address=self._reuse_address,
|
||||||
@@ -128,6 +130,33 @@ class UnixSite(BaseSite):
             ssl=self._ssl_context, backlog=self._backlog)


+class NamedPipeSite(BaseSite):
+    __slots__ = ('_path', )
+
+    def __init__(self, runner: 'BaseRunner', path: str, *,
+                 shutdown_timeout: float=60.0) -> None:
+        loop = asyncio.get_event_loop()
+        if not isinstance(loop, asyncio.ProactorEventLoop):  # type: ignore
+            raise RuntimeError("Named Pipes only available in proactor"
+                               "loop under windows")
+        super().__init__(runner, shutdown_timeout=shutdown_timeout)
+        self._path = path
+
+    @property
+    def name(self) -> str:
+        return self._path
+
+    async def start(self) -> None:
+        await super().start()
+        loop = asyncio.get_event_loop()
+        server = self._runner.server
+        assert server is not None
+        _server = await loop.start_serving_pipe(  # type: ignore
+            server, self._path
+        )
+        self._server = _server[0]
+
+
 class SockSite(BaseSite):
     __slots__ = ('_sock', '_name')

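Hedged usage sketch for the new site type: serve an application on a Windows named pipe (requires the proactor event loop, as enforced in __init__ above); the pipe name and helper below are illustrative.

from aiohttp import web


async def serve_on_pipe(app: web.Application) -> web.NamedPipeSite:
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.NamedPipeSite(runner, r'\\.\pipe\aiohttp_demo')
    await site.start()
    return site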
@ -155,7 +184,7 @@ class SockSite(BaseSite):
|
|||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
server = self._runner.server
|
server = self._runner.server
|
||||||
assert server is not None
|
assert server is not None
|
||||||
self._server = await loop.create_server(
|
self._server = await loop.create_server( # type: ignore
|
||||||
server, sock=self._sock,
|
server, sock=self._sock,
|
||||||
ssl=self._ssl_context, backlog=self._backlog)
|
ssl=self._ssl_context, backlog=self._backlog)
|
||||||
|
|
||||||
|
@ -27,6 +27,7 @@ from typing import (  # noqa
     Set,
     Sized,
     Tuple,
+    Type,
     Union,
     cast,
 )
@ -123,7 +124,7 @@ class AbstractResource(Sized, Iterable['AbstractRoute']):
 class AbstractRoute(abc.ABC):

     def __init__(self, method: str,
-                 handler: Union[_WebHandler, AbstractView], *,
+                 handler: Union[_WebHandler, Type[AbstractView]], *,
                  expect_handler: _ExpectHandler=None,
                  resource: AbstractResource=None) -> None:

@ -154,8 +155,8 @@ class AbstractRoute(abc.ABC):
             async def handler_wrapper(request: Request) -> StreamResponse:
                 result = old_handler(request)
                 if asyncio.iscoroutine(result):
-                    result = await result
-                return result
+                    return await result
+                return result  # type: ignore
             old_handler = handler
             handler = handler_wrapper

@ -296,7 +297,7 @@ class Resource(AbstractResource):
         self._routes = []  # type: List[ResourceRoute]

     def add_route(self, method: str,
-                  handler: Union[AbstractView, _WebHandler], *,
+                  handler: Union[Type[AbstractView], _WebHandler], *,
                   expect_handler: Optional[_ExpectHandler]=None
                   ) -> 'ResourceRoute':

@ -500,7 +501,7 @@ class StaticResource(PrefixResource):
                  expect_handler: Optional[_ExpectHandler]=None,
                  chunk_size: int=256 * 1024,
                  show_index: bool=False, follow_symlinks: bool=False,
-                 append_version: bool=False)-> None:
+                 append_version: bool=False) -> None:
         super().__init__(prefix, name=name)
         try:
             directory = Path(directory)
@ -698,7 +699,8 @@ class PrefixedSubAppResource(PrefixResource):
                 'prefix': self._prefix}

     async def resolve(self, request: Request) -> _Resolve:
-        if not request.url.raw_path.startswith(self._prefix):
+        if not request.url.raw_path.startswith(self._prefix + '/') and \
+                request.url.raw_path != self._prefix:
             return None, set()
         match_info = await self._app.router.resolve(request)
         match_info.add_app(self._app)
@ -825,7 +827,7 @@ class ResourceRoute(AbstractRoute):
     """A route with resource"""

     def __init__(self, method: str,
-                 handler: Union[_WebHandler, AbstractView],
+                 handler: Union[_WebHandler, Type[AbstractView]],
                  resource: AbstractResource, *,
                  expect_handler: Optional[_ExpectHandler]=None) -> None:
         super().__init__(method, handler, expect_handler=expect_handler,
@ -1025,7 +1027,7 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
         return resource

     def add_route(self, method: str, path: str,
-                  handler: Union[_WebHandler, AbstractView],
+                  handler: Union[_WebHandler, Type[AbstractView]],
                   *, name: Optional[str]=None,
                   expect_handler: Optional[_ExpectHandler]=None
                   ) -> AbstractRoute:
@ -1112,7 +1114,7 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
         """
         return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)

-    def add_view(self, path: str, handler: AbstractView,
+    def add_view(self, path: str, handler: Type[AbstractView],
                  **kwargs: Any) -> AbstractRoute:
         """
         Shortcut for add_route with ANY methods for a class-based view
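The Type[AbstractView] annotations above only change the typing, not the behaviour: class-based views are still registered by passing the class itself. A minimal sketch of that usage (names like MyView are illustrative, not part of this commit):

from aiohttp import web

class MyView(web.View):
    async def get(self) -> web.Response:
        # self.request is the incoming request
        return web.json_response({"ok": True})

app = web.Application()
# add_view registers the view class itself (hence Type[AbstractView])
app.router.add_view("/status", MyView)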
@ -20,14 +20,13 @@ from .http import (
     WebSocketReader,
     WebSocketWriter,
     WSMessage,
-    WSMsgType,
-    ws_ext_gen,
-    ws_ext_parse,
 )
+from .http import WSMsgType as WSMsgType
+from .http import ws_ext_gen, ws_ext_parse
 from .log import ws_logger
 from .streams import EofStream, FlowControlDataQueue
 from .typedefs import JSONDecoder, JSONEncoder
-from .web_exceptions import HTTPBadRequest, HTTPException, HTTPMethodNotAllowed
+from .web_exceptions import HTTPBadRequest, HTTPException
 from .web_request import BaseRequest
 from .web_response import StreamResponse

@ -47,6 +46,8 @@ class WebSocketReady:

 class WebSocketResponse(StreamResponse):

+    _length_check = False
+
     def __init__(self, *,
                  timeout: float=10.0, receive_timeout: Optional[float]=None,
                  autoclose: bool=True, autoping: bool=True,
@ -129,8 +130,6 @@ class WebSocketResponse(StreamResponse):
                                bool,
                                bool]:
         headers = request.headers
-        if request.method != hdrs.METH_GET:
-            raise HTTPMethodNotAllowed(request.method, [hdrs.METH_GET])
         if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip():
             raise HTTPBadRequest(
                 text=('No WebSocket UPGRADE hdr: {}\n Can '
@ -179,7 +178,6 @@ class WebSocketResponse(StreamResponse):
         response_headers = CIMultiDict(  # type: ignore
             {hdrs.UPGRADE: 'websocket',
              hdrs.CONNECTION: 'upgrade',
-             hdrs.TRANSFER_ENCODING: 'chunked',
              hdrs.SEC_WEBSOCKET_ACCEPT: accept_val})

         notakeover = False
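The handshake code changed above runs inside WebSocketResponse.prepare(); ordinary handlers are unaffected by it. A minimal sketch of a websocket handler for reference (handler and route names are illustrative):

from aiohttp import web

async def ws_handler(request: web.Request) -> web.WebSocketResponse:
    ws = web.WebSocketResponse()
    await ws.prepare(request)            # performs the upgrade checked above
    async for msg in ws:
        if msg.type == web.WSMsgType.TEXT:
            await ws.send_str(msg.data)  # simple echo
    return ws

app = web.Application()
app.router.add_get("/ws", ws_handler)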
@ -1,10 +1,12 @@
 from __future__ import absolute_import, division, print_function

+import sys
+
 from functools import partial

-from . import converters, exceptions, filters, validators
+from . import converters, exceptions, filters, setters, validators
 from ._config import get_run_validators, set_run_validators
-from ._funcs import asdict, assoc, astuple, evolve, has
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
 from ._make import (
     NOTHING,
     Attribute,
@ -19,7 +21,7 @@ from ._make import (
 from ._version_info import VersionInfo


-__version__ = "19.3.0"
+__version__ = "20.3.0"
 __version_info__ = VersionInfo._from_version_string(__version__)

 __title__ = "attrs"
@ -39,7 +41,6 @@ s = attributes = attrs
 ib = attr = attrib
 dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

-
 __all__ = [
     "Attribute",
     "Factory",
@ -61,8 +62,15 @@ __all__ = [
     "has",
     "ib",
     "make_class",
+    "resolve_types",
     "s",
     "set_run_validators",
+    "setters",
     "validate",
     "validators",
 ]

+
+if sys.version_info[:2] >= (3, 6):
+    from ._next_gen import define, field, frozen, mutable
+
+    __all__.extend((define, field, frozen, mutable))
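This bumps the vendored attrs from 19.3.0 to 20.3.0 and, on Python 3.6+, re-exports the next-generation helpers from attr._next_gen. A quick sanity check of what the upgraded module exposes (a sketch, assuming the upgraded package is installed):

import attr

print(attr.__version__)                  # expected: "20.3.0"
print("resolve_types" in attr.__all__)   # True after this change
print(hasattr(attr, "define"))           # True on Python 3.6+ via attr._next_gen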
@ -18,6 +18,7 @@ from typing import (
 from . import exceptions as exceptions
 from . import filters as filters
 from . import converters as converters
+from . import setters as setters
 from . import validators as validators

 from ._version_info import VersionInfo
@ -37,20 +38,26 @@ _T = TypeVar("_T")
 _C = TypeVar("_C", bound=type)

 _ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
-_ConverterType = Callable[[Any], _T]
+_ConverterType = Callable[[Any], Any]
 _FilterType = Callable[[Attribute[_T], _T], bool]
 _ReprType = Callable[[Any], str]
 _ReprArgType = Union[bool, _ReprType]
-# FIXME: in reality, if multiple validators are passed they must be in a list or tuple,
-# but those are invariant and so would prevent subtypes of _ValidatorType from working
-# when passed in a list or tuple.
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[[type, List[Attribute]], List[Attribute]]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
 _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]

 # _make --

 NOTHING: object

-# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)`
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
 # Work around mypy issue #4554 in the common case by using an overload.
 @overload
 def Factory(factory: Callable[[], _T]) -> _T: ...
@ -70,16 +77,17 @@ class Attribute(Generic[_T]):
     order: bool
     hash: Optional[bool]
     init: bool
-    converter: Optional[_ConverterType[_T]]
+    converter: Optional[_ConverterType]
     metadata: Dict[Any, Any]
     type: Optional[Type[_T]]
     kw_only: bool
+    on_setattr: _OnSetAttrType

 # NOTE: We had several choices for the annotation to use for type arg:
 # 1) Type[_T]
 #   - Pros: Handles simple cases correctly
-#   - Cons: Might produce less informative errors in the case of conflicting TypeVars
-#     e.g. `attr.ib(default='bad', type=int)`
+#   - Cons: Might produce less informative errors in the case of conflicting
+#     TypeVars e.g. `attr.ib(default='bad', type=int)`
 # 2) Callable[..., _T]
 #   - Pros: Better error messages than #1 for conflicting TypeVars
 #   - Cons: Terrible error messages for validator checks.
@ -97,7 +105,8 @@ class Attribute(Generic[_T]):
 # This makes this type of assignments possible:
 #     x: int = attr(8)
 #
-# This form catches explicit None or no default but with no other arguments returns Any.
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
 @overload
 def attrib(
     default: None = ...,
@ -113,9 +122,11 @@ def attrib(
     kw_only: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
 ) -> Any: ...

-# This form catches an explicit None or no default and infers the type from the other arguments.
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
 @overload
 def attrib(
     default: None = ...,
@ -126,11 +137,12 @@ def attrib(
     init: bool = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: Optional[Type[_T]] = ...,
-    converter: Optional[_ConverterType[_T]] = ...,
+    converter: Optional[_ConverterType] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
 ) -> _T: ...

 # This form catches an explicit default argument.
@ -144,11 +156,12 @@ def attrib(
     init: bool = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: Optional[Type[_T]] = ...,
-    converter: Optional[_ConverterType[_T]] = ...,
+    converter: Optional[_ConverterType] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
 ) -> _T: ...

 # This form covers type=non-Type: e.g. forward references (str), Any
@ -162,11 +175,83 @@ def attrib(
     init: bool = ...,
     metadata: Optional[Mapping[Any, Any]] = ...,
     type: object = ...,
-    converter: Optional[_ConverterType[_T]] = ...,
+    converter: Optional[_ConverterType] = ...,
     factory: Optional[Callable[[], _T]] = ...,
     kw_only: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+    *,
+    default: None = ...,
+    validator: None = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: None = ...,
+    factory: None = ...,
+    kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+    *,
+    default: None = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+    *,
+    default: _T,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+    *,
+    default: Optional[_T] = ...,
+    validator: Optional[_ValidatorArgType[_T]] = ...,
+    repr: _ReprArgType = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    metadata: Optional[Mapping[Any, Any]] = ...,
+    converter: Optional[_ConverterType] = ...,
+    factory: Optional[Callable[[], _T]] = ...,
+    kw_only: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
 ) -> Any: ...
 @overload
 def attrs(
@ -187,6 +272,11 @@ def attrs(
     auto_exc: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
 ) -> _C: ...
 @overload
 def attrs(
@ -207,7 +297,61 @@ def attrs(
     auto_exc: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    collect_by_mro: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
 ) -> Callable[[_C], _C]: ...
+@overload
+def define(
+    maybe_cls: _C,
+    *,
+    these: Optional[Dict[str, Any]] = ...,
+    repr: bool = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+) -> _C: ...
+@overload
+def define(
+    maybe_cls: None = ...,
+    *,
+    these: Optional[Dict[str, Any]] = ...,
+    repr: bool = ...,
+    hash: Optional[bool] = ...,
+    init: bool = ...,
+    slots: bool = ...,
+    frozen: bool = ...,
+    weakref_slot: bool = ...,
+    str: bool = ...,
+    auto_attribs: bool = ...,
+    kw_only: bool = ...,
+    cache_hash: bool = ...,
+    auto_exc: bool = ...,
+    eq: Optional[bool] = ...,
+    order: Optional[bool] = ...,
+    auto_detect: bool = ...,
+    getstate_setstate: Optional[bool] = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define  # they differ only in their defaults

 # TODO: add support for returning NamedTuple from the mypy plugin
 class _Fields(Tuple[Attribute[Any], ...]):
@ -216,9 +360,15 @@ class _Fields(Tuple[Attribute[Any], ...]):
 def fields(cls: type) -> _Fields: ...
 def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
 def validate(inst: Any) -> None: ...
+def resolve_types(
+    cls: _C,
+    globalns: Optional[Dict[str, Any]] = ...,
+    localns: Optional[Dict[str, Any]] = ...,
+) -> _C: ...

 # TODO: add support for returning a proper attrs class from the mypy plugin
-# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
 def make_class(
     name: str,
     attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
@ -238,12 +388,16 @@ def make_class(
     auto_exc: bool = ...,
     eq: Optional[bool] = ...,
     order: Optional[bool] = ...,
+    collect_by_mro: bool = ...,
+    on_setattr: Optional[_OnSetAttrArgType] = ...,
+    field_transformer: Optional[_FieldTransformer] = ...,
 ) -> type: ...

 # _funcs --

 # TODO: add support for returning TypedDict from the mypy plugin
-# FIXME: asdict/astuple do not honor their factory args. waiting on one of these:
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
 # https://github.com/python/mypy/issues/4236
 # https://github.com/python/typing/issues/253
 def asdict(
@ -252,6 +406,7 @@ def asdict(
     filter: Optional[_FilterType[Any]] = ...,
     dict_factory: Type[Mapping[Any, Any]] = ...,
     retain_collection_types: bool = ...,
+    value_serializer: Optional[Callable[[type, Attribute, Any], Any]] = ...,
 ) -> Dict[str, Any]: ...

 # TODO: add support for returning NamedTuple from the mypy plugin
@ -19,9 +19,10 @@ else:


 if PY2:
-    from UserDict import IterableUserDict
     from collections import Mapping, Sequence

+    from UserDict import IterableUserDict
+
     # We 'bundle' isclass instead of using inspect as importing inspect is
     # fairly expensive (order of 10-15 ms for a modern machine in 2016)
     def isclass(klass):
@ -90,7 +91,7 @@ if PY2:
         res.data.update(d)  # We blocked update, so we have to do it like this.
         return res

-    def just_warn(*args, **kw):  # pragma: nocover
+    def just_warn(*args, **kw):  # pragma: no cover
         """
         We only warn on Python 3 because we are not aware of any concrete
         consequences of not setting the cell on Python 2.
@ -131,7 +132,7 @@ def make_set_closure_cell():
     """
     # pypy makes this easy. (It also supports the logic below, but
     # why not do the easy/fast thing?)
-    if PYPY:  # pragma: no cover
+    if PYPY:

         def set_closure_cell(cell, value):
             cell.__setstate__((value,))
@ -13,6 +13,7 @@ def asdict(
     filter=None,
     dict_factory=dict,
     retain_collection_types=False,
+    value_serializer=None,
 ):
     """
     Return the ``attrs`` attribute values of *inst* as a dict.
@ -32,6 +33,10 @@ def asdict(
     :param bool retain_collection_types: Do not convert to ``list`` when
         encountering an attribute whose type is ``tuple`` or ``set``.  Only
         meaningful if ``recurse`` is ``True``.
+    :param Optional[callable] value_serializer: A hook that is called for every
+        attribute or dict key/value.  It receives the current instance, field
+        and value and must return the (updated) value.  The hook is run *after*
+        the optional *filter* has been applied.

     :rtype: return type of *dict_factory*

@ -40,6 +45,7 @@ def asdict(

     .. versionadded:: 16.0.0 *dict_factory*
     .. versionadded:: 16.1.0 *retain_collection_types*
+    .. versionadded:: 20.3.0 *value_serializer*
     """
     attrs = fields(inst.__class__)
     rv = dict_factory()
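The new *value_serializer* hook documented above runs on every attribute after filtering. A sketch of how it might be used (the Event class and serializer are illustrative, not from this commit):

import datetime
import attr

@attr.s
class Event(object):
    when = attr.ib()

def serialize(inst, field, value):
    # Render datetimes as ISO strings, pass everything else through.
    if isinstance(value, datetime.datetime):
        return value.isoformat()
    return value

event = Event(when=datetime.datetime(2020, 11, 1, 12, 30))
print(attr.asdict(event, value_serializer=serialize))
# {'when': '2020-11-01T12:30:00'}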
@ -47,17 +53,30 @@ def asdict(
         v = getattr(inst, a.name)
         if filter is not None and not filter(a, v):
             continue
+
+        if value_serializer is not None:
+            v = value_serializer(inst, a, v)
+
         if recurse is True:
             if has(v.__class__):
                 rv[a.name] = asdict(
-                    v, True, filter, dict_factory, retain_collection_types
+                    v,
+                    True,
+                    filter,
+                    dict_factory,
+                    retain_collection_types,
+                    value_serializer,
                 )
-            elif isinstance(v, (tuple, list, set)):
+            elif isinstance(v, (tuple, list, set, frozenset)):
                 cf = v.__class__ if retain_collection_types is True else list
                 rv[a.name] = cf(
                     [
                         _asdict_anything(
-                            i, filter, dict_factory, retain_collection_types
+                            i,
+                            filter,
+                            dict_factory,
+                            retain_collection_types,
+                            value_serializer,
                         )
                         for i in v
                     ]
@ -67,10 +86,18 @@ def asdict(
                 rv[a.name] = df(
                     (
                         _asdict_anything(
-                            kk, filter, df, retain_collection_types
+                            kk,
+                            filter,
+                            df,
+                            retain_collection_types,
+                            value_serializer,
                         ),
                         _asdict_anything(
-                            vv, filter, df, retain_collection_types
+                            vv,
+                            filter,
+                            df,
+                            retain_collection_types,
+                            value_serializer,
                         ),
                     )
                     for kk, vv in iteritems(v)
@ -82,19 +109,36 @@ def asdict(
     return rv


-def _asdict_anything(val, filter, dict_factory, retain_collection_types):
+def _asdict_anything(
+    val,
+    filter,
+    dict_factory,
+    retain_collection_types,
+    value_serializer,
+):
     """
     ``asdict`` only works on attrs instances, this works on anything.
     """
     if getattr(val.__class__, "__attrs_attrs__", None) is not None:
         # Attrs class.
-        rv = asdict(val, True, filter, dict_factory, retain_collection_types)
-    elif isinstance(val, (tuple, list, set)):
+        rv = asdict(
+            val,
+            True,
+            filter,
+            dict_factory,
+            retain_collection_types,
+            value_serializer,
+        )
+    elif isinstance(val, (tuple, list, set, frozenset)):
         cf = val.__class__ if retain_collection_types is True else list
         rv = cf(
             [
                 _asdict_anything(
-                    i, filter, dict_factory, retain_collection_types
+                    i,
+                    filter,
+                    dict_factory,
+                    retain_collection_types,
+                    value_serializer,
                 )
                 for i in val
             ]
@ -103,13 +147,20 @@ def _asdict_anything(val, filter, dict_factory, retain_collection_types):
         df = dict_factory
         rv = df(
             (
-                _asdict_anything(kk, filter, df, retain_collection_types),
-                _asdict_anything(vv, filter, df, retain_collection_types),
+                _asdict_anything(
+                    kk, filter, df, retain_collection_types, value_serializer
+                ),
+                _asdict_anything(
+                    vv, filter, df, retain_collection_types, value_serializer
+                ),
             )
             for kk, vv in iteritems(val)
         )
     else:
         rv = val
+        if value_serializer is not None:
+            rv = value_serializer(None, None, rv)
+
     return rv


@ -164,7 +215,7 @@ def astuple(
                         retain_collection_types=retain,
                     )
                 )
-            elif isinstance(v, (tuple, list, set)):
+            elif isinstance(v, (tuple, list, set, frozenset)):
                 cf = v.__class__ if retain is True else list
                 rv.append(
                     cf(
@ -209,6 +260,7 @@ def astuple(
                 rv.append(v)
         else:
             rv.append(v)
+
     return rv if tuple_factory is list else tuple_factory(rv)
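The isinstance checks above now include frozenset, so frozensets survive retain_collection_types=True instead of being converted to lists. A small sketch of the effect (the Box class is illustrative):

import attr

@attr.s
class Box(object):
    items = attr.ib()

box = Box(items=frozenset({1, 2, 3}))
print(attr.asdict(box, retain_collection_types=True))
# {'items': frozenset({1, 2, 3})}
print(attr.astuple(box, retain_collection_types=True))
# (frozenset({1, 2, 3}),)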
@ -287,4 +339,52 @@ def evolve(inst, **changes):
         init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
         if init_name not in changes:
             changes[init_name] = getattr(inst, attr_name)

     return cls(**changes)
+
+
+def resolve_types(cls, globalns=None, localns=None):
+    """
+    Resolve any strings and forward annotations in type annotations.
+
+    This is only required if you need concrete types in `Attribute`'s *type*
+    field. In other words, you don't need to resolve your types if you only
+    use them for static type checking.
+
+    With no arguments, names will be looked up in the module in which the class
+    was created. If this is not what you want, e.g. if the name only exists
+    inside a method, you may pass *globalns* or *localns* to specify other
+    dictionaries in which to look up these names. See the docs of
+    `typing.get_type_hints` for more details.
+
+    :param type cls: Class to resolve.
+    :param Optional[dict] globalns: Dictionary containing global variables.
+    :param Optional[dict] localns: Dictionary containing local variables.
+
+    :raise TypeError: If *cls* is not a class.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+    :raise NameError: If types cannot be resolved because of missing variables.
+
+    :returns: *cls* so you can use this function also as a class decorator.
+        Please note that you have to apply it **after** `attr.s`. That means
+        the decorator has to come in the line **before** `attr.s`.
+
+    .. versionadded:: 20.1.0
+    """
+    try:
+        # Since calling get_type_hints is expensive we cache whether we've
+        # done it already.
+        cls.__attrs_types_resolved__
+    except AttributeError:
+        import typing
+
+        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+        for field in fields(cls):
+            if field.name in hints:
+                # Since fields have been frozen we must work around it.
+                _obj_setattr(field, "type", hints[field.name])
+        cls.__attrs_types_resolved__ = True
+
+    # Return the class so you can use it as a decorator too.
+    return cls
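Per the docstring above, attr.resolve_types turns string and forward annotations into real types on the Attribute.type field. A sketch of that usage (the Node class is illustrative):

import attr
from typing import Optional

@attr.s(auto_attribs=True)
class Node:
    value: int
    next: Optional["Node"] = None

attr.resolve_types(Node)
print(attr.fields(Node).next.type)  # typing.Optional[Node] rather than a string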
File diff suppressed because it is too large (Load Diff)

160  venv/Lib/site-packages/attr/_next_gen.py  Normal file
@ -0,0 +1,160 @@
"""
This is a Python 3.6 and later-only, keyword-only, and **provisional** API that
calls `attr.s` with different default values.

Provisional APIs that shall become "import attrs" one glorious day.
"""

from functools import partial

from attr.exceptions import UnannotatedAttributeError

from . import setters
from ._make import NOTHING, _frozen_setattrs, attrib, attrs


def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
):
    r"""
    The only behavioral differences are the handling of the *auto_attribs*
    option:

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
       exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

       1. If all attributes are annotated and no `attr.ib` is found, it assumes
          *auto_attribs=True*.
       2. Otherwise it assumes *auto_attribs=False* and tries to collect
          `attr.ib`\ s.

    and that mutable classes (``frozen=False``) validate on ``__setattr__``.

    .. versionadded:: 20.1.0
    """

    def do_it(cls, auto_attribs):
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        nonlocal frozen, on_setattr

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = setters.validate

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    raise ValueError(
                        "Frozen classes can't use on_setattr "
                        "(frozen-ness was inherited)."
                    )

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator.  It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)


mutable = define
frozen = partial(define, frozen=True, on_setattr=None)


def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
):
    """
    Identical to `attr.ib`, except keyword-only and with some arguments
    removed.

    .. versionadded:: 20.1.0
    """
    return attrib(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
    )
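The provisional define/field API added here is keyword-only, defaults to slots, and makes mutable classes validate on assignment. A sketch of typical use on Python 3.6+ (the Server class is illustrative):

import attr

@attr.define
class Server:
    host: str
    port: int = attr.field(
        default=25565,
        validator=attr.validators.instance_of(int),
    )

srv = Server("mc.example.net")
srv.port = 25566        # ok: validator runs on __setattr__ for mutable classes
# srv.port = "oops"     # would raise TypeError from the validator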
@ -4,7 +4,14 @@ Commonly useful converters.

 from __future__ import absolute_import, division, print_function

-from ._make import NOTHING, Factory
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+    "pipe",
+    "optional",
+    "default_if_none",
+]


 def optional(converter):
@ -3,10 +3,9 @@ from . import _ConverterType
|
|||||||
|
|
||||||
_T = TypeVar("_T")
|
_T = TypeVar("_T")
|
||||||
|
|
||||||
def optional(
|
def pipe(*validators: _ConverterType) -> _ConverterType: ...
|
||||||
converter: _ConverterType[_T]
|
def optional(converter: _ConverterType) -> _ConverterType: ...
|
||||||
) -> _ConverterType[Optional[_T]]: ...
|
|
||||||
@overload
|
@overload
|
||||||
def default_if_none(default: _T) -> _ConverterType[_T]: ...
|
def default_if_none(default: _T) -> _ConverterType: ...
|
||||||
@overload
|
@overload
|
||||||
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ...
|
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
|
||||||
|
@ -1,20 +1,37 @@
 from __future__ import absolute_import, division, print_function


-class FrozenInstanceError(AttributeError):
+class FrozenError(AttributeError):
     """
-    A frozen/immutable instance has been attempted to be modified.
+    A frozen/immutable instance or attribute have been attempted to be
+    modified.

     It mirrors the behavior of ``namedtuples`` by using the same error message
     and subclassing `AttributeError`.

-    .. versionadded:: 16.1.0
+    .. versionadded:: 20.1.0
     """

     msg = "can't set attribute"
     args = [msg]


+class FrozenInstanceError(FrozenError):
+    """
+    A frozen instance has been attempted to be modified.
+
+    .. versionadded:: 16.1.0
+    """
+
+
+class FrozenAttributeError(FrozenError):
+    """
+    A frozen attribute has been attempted to be modified.
+
+    .. versionadded:: 20.1.0
+    """
+
+
 class AttrsAttributeNotFoundError(ValueError):
     """
     An ``attrs`` function couldn't find an attribute that the user asked for.
@ -51,7 +68,8 @@ class UnannotatedAttributeError(RuntimeError):

 class PythonTooOldError(RuntimeError):
     """
-    An ``attrs`` feature requiring a more recent python version has been used.
+    It was attempted to use an ``attrs`` feature that requires a newer Python
+    version.

     .. versionadded:: 18.2.0
     """
@ -1,8 +1,10 @@
 from typing import Any

-class FrozenInstanceError(AttributeError):
+class FrozenError(AttributeError):
     msg: str = ...

+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
 class AttrsAttributeNotFoundError(ValueError): ...
 class NotAnAttrsClassError(ValueError): ...
 class DefaultAlreadySetError(RuntimeError): ...
77  venv/Lib/site-packages/attr/setters.py  Normal file
@ -0,0 +1,77 @@
"""
Commonly used hooks for on_setattr.
"""

from __future__ import absolute_import, division, print_function

from . import _config
from .exceptions import FrozenAttributeError


def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        rv = new_value

        for setter in setters:
            rv = setter(instance, attrib, rv)

        return rv

    return wrapped_pipe


def frozen(_, __, ___):
    """
    Prevent an attribute to be modified.

    .. versionadded:: 20.1.0
    """
    raise FrozenAttributeError()


def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    if _config._run_validators is False:
        return new_value

    v = attrib.validator
    if not v:
        return new_value

    v(instance, attrib, new_value)

    return new_value


def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    c = attrib.converter
    if c:
        return c(new_value)

    return new_value


NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""
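These hooks plug into the new on_setattr argument of attr.s. A sketch combining convert and validate on assignment (the Entry class is illustrative):

import attr
from attr import setters, validators

@attr.s(
    on_setattr=setters.pipe(setters.convert, setters.validate),
)
class Entry(object):
    count = attr.ib(converter=int, validator=validators.instance_of(int))

e = Entry(count="1")   # converter runs in __init__ as usual
e.count = "2"          # on_setattr converts to 2, then validates
print(e.count)         # 2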
18  venv/Lib/site-packages/attr/setters.pyi  Normal file
@ -0,0 +1,18 @@
from . import _OnSetAttrType, Attribute
from typing import TypeVar, Any, NewType, NoReturn, cast

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute, new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
@ -67,7 +67,7 @@ def instance_of(type):
     return _InstanceOfValidator(type)


-@attrs(repr=False, frozen=True)
+@attrs(repr=False, frozen=True, slots=True)
 class _MatchesReValidator(object):
     regex = attrib()
     flags = attrib()
@ -171,7 +171,8 @@ def provides(interface):
     performed using ``interface.providedBy(value)`` (see `zope.interface
     <https://zopeinterface.readthedocs.io/en/latest/>`_).

-    :param zope.interface.Interface interface: The interface to check for.
+    :param interface: The interface to check for.
+    :type interface: ``zope.interface.Interface``

     :raises TypeError: With a human readable error message, the attribute
         (of type `attr.Attribute`), the expected interface, and the
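The _MatchesReValidator touched above backs the public attr.validators.matches_re helper; a sketch of its use for reference (the User class is illustrative):

import re
import attr
from attr.validators import matches_re

@attr.s
class User(object):
    name = attr.ib(validator=matches_re(r"[a-z0-9_]+", flags=re.ASCII))

User(name="steve")         # ok
# User(name="Not Valid!")  # raises ValueError from the validator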
@ -1,33 +0,0 @@
-attr/__init__.py,sha256=ONaI-ZEGOIC7IDqH2HANgesnOxPE1m0GIRRYPPsXEHk,1349
-attr/__init__.pyi,sha256=fOnMRTF00b5J23PYPF74u66UVhVzzm0KYVxzmVXHPw0,8257
-attr/__pycache__/__init__.cpython-36.pyc,,
-attr/__pycache__/_compat.cpython-36.pyc,,
-attr/__pycache__/_config.cpython-36.pyc,,
-attr/__pycache__/_funcs.cpython-36.pyc,,
-attr/__pycache__/_make.cpython-36.pyc,,
-attr/__pycache__/_version_info.cpython-36.pyc,,
-attr/__pycache__/converters.cpython-36.pyc,,
-attr/__pycache__/exceptions.cpython-36.pyc,,
-attr/__pycache__/filters.cpython-36.pyc,,
-attr/__pycache__/validators.cpython-36.pyc,,
-attr/_compat.py,sha256=-pJtdtqgCg0K6rH_BWf3wKuTum58GD-WWPclQQ2SUaU,7326
-attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514
-attr/_funcs.py,sha256=unAJfNGSTOzxyFzkj7Rs3O1bfsQodmXyir9uZKen-vY,9696
-attr/_make.py,sha256=HhjGhFEbnxPKuUb9hFmAjXoQGpekniw1IEF3_Z-vwCc,70807
-attr/_version_info.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162
-attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
-attr/converters.py,sha256=5QJRYSXE8G7PW0289y_SPwvvZIcw-nJIuBlfYVdB4BQ,2141
-attr/converters.pyi,sha256=wAhCoOT1MFV8t323rpD87O7bxQ8CYLTPiBQd-29BieI,351
-attr/exceptions.py,sha256=hbhOa3b4W8_mRrbj3FsMTR4Bt5xzbJs5xaFTWn8s6h4,1635
-attr/exceptions.pyi,sha256=4zuaJyl2axxWbqnZgxo_2oTpPNbyowEw3A4hqV5PmAc,458
-attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098
-attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214
-attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-attr/validators.py,sha256=8AsxgdDgh3sGPseiUIMPGcTr6PvaDYfH3AK46tsvs8U,11460
-attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868
-attrs-19.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-attrs-19.3.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082
-attrs-19.3.0.dist-info/METADATA,sha256=WmnjYy_TftebL3pewXyGEaD4TZRrLUEHk3frEkAtqL0,9022
-attrs-19.3.0.dist-info/RECORD,,
-attrs-19.3.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
-attrs-19.3.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5
11  venv/Lib/site-packages/attrs-20.3.0.dist-info/AUTHORS.rst  Normal file
@ -0,0 +1,11 @@
Credits
=======

``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.

The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.

A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.

It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of it clunkiness and unfortunate decisions.
Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
@ -1,6 +1,6 @@
|
|||||||
Metadata-Version: 2.1
|
Metadata-Version: 2.1
|
||||||
Name: attrs
|
Name: attrs
|
||||||
Version: 19.3.0
|
Version: 20.3.0
|
||||||
Summary: Classes Without Boilerplate
|
Summary: Classes Without Boilerplate
|
||||||
Home-page: https://www.attrs.org/
|
Home-page: https://www.attrs.org/
|
||||||
Author: Hynek Schlawack
|
Author: Hynek Schlawack
|
||||||
@ -11,6 +11,8 @@ License: MIT
|
|||||||
Project-URL: Documentation, https://www.attrs.org/
|
Project-URL: Documentation, https://www.attrs.org/
|
||||||
Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
|
Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues
|
||||||
Project-URL: Source Code, https://github.com/python-attrs/attrs
|
Project-URL: Source Code, https://github.com/python-attrs/attrs
|
||||||
|
Project-URL: Funding, https://github.com/sponsors/hynek
|
||||||
|
Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
|
||||||
Keywords: class,attribute,boilerplate
|
Keywords: class,attribute,boilerplate
|
||||||
Platform: UNKNOWN
|
Platform: UNKNOWN
|
||||||
Classifier: Development Status :: 5 - Production/Stable
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
@ -22,43 +24,43 @@ Classifier: Programming Language :: Python
|
|||||||
Classifier: Programming Language :: Python :: 2
|
Classifier: Programming Language :: Python :: 2
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
Classifier: Programming Language :: Python :: 3
|
Classifier: Programming Language :: Python :: 3
|
||||||
Classifier: Programming Language :: Python :: 3.4
|
|
||||||
Classifier: Programming Language :: Python :: 3.5
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
Description-Content-Type: text/x-rst
- Provides-Extra: azure-pipelines
- Requires-Dist: coverage ; extra == 'azure-pipelines'
- Requires-Dist: hypothesis ; extra == 'azure-pipelines'
- Requires-Dist: pympler ; extra == 'azure-pipelines'
- Requires-Dist: pytest (>=4.3.0) ; extra == 'azure-pipelines'
- Requires-Dist: six ; extra == 'azure-pipelines'
- Requires-Dist: zope.interface ; extra == 'azure-pipelines'
- Requires-Dist: pytest-azurepipelines ; extra == 'azure-pipelines'
Provides-Extra: dev
- Requires-Dist: coverage ; extra == 'dev'
+ Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev'
Requires-Dist: hypothesis ; extra == 'dev'
Requires-Dist: pympler ; extra == 'dev'
Requires-Dist: pytest (>=4.3.0) ; extra == 'dev'
Requires-Dist: six ; extra == 'dev'
Requires-Dist: zope.interface ; extra == 'dev'
+ Requires-Dist: furo ; extra == 'dev'
Requires-Dist: sphinx ; extra == 'dev'
Requires-Dist: pre-commit ; extra == 'dev'
Provides-Extra: docs
+ Requires-Dist: furo ; extra == 'docs'
Requires-Dist: sphinx ; extra == 'docs'
Requires-Dist: zope.interface ; extra == 'docs'
Provides-Extra: tests
- Requires-Dist: coverage ; extra == 'tests'
+ Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests'
Requires-Dist: hypothesis ; extra == 'tests'
Requires-Dist: pympler ; extra == 'tests'
Requires-Dist: pytest (>=4.3.0) ; extra == 'tests'
Requires-Dist: six ; extra == 'tests'
Requires-Dist: zope.interface ; extra == 'tests'
+ Provides-Extra: tests_no_zope
+ Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope'
+ Requires-Dist: hypothesis ; extra == 'tests_no_zope'
+ Requires-Dist: pympler ; extra == 'tests_no_zope'
+ Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope'
+ Requires-Dist: six ; extra == 'tests_no_zope'

.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png
:alt: attrs Logo
@ -71,8 +73,8 @@ Requires-Dist: zope.interface ; extra == 'tests'
:target: https://www.attrs.org/en/stable/?badge=stable
:alt: Documentation Status

- .. image:: https://attrs.visualstudio.com/attrs/_apis/build/status/python-attrs.attrs?branchName=master
+ .. image:: https://github.com/python-attrs/attrs/workflows/CI/badge.svg?branch=master
- :target: https://attrs.visualstudio.com/attrs/_build/latest?definitionId=1&branchName=master
+ :target: https://github.com/python-attrs/attrs/actions?workflow=CI
:alt: CI Status

.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg
@ -89,7 +91,7 @@ Requires-Dist: zope.interface ; extra == 'tests'

Its main goal is to help you to write **concise** and **correct** software without slowing down your code.

- .. -spiel-end-
+ .. teaser-end

For that, it gives you a class decorator and a way to declaratively define the attributes on that class:

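For a concrete picture of what that looks like, here is a minimal sketch (the ``Point`` class and its values are invented for illustration; the README's own, longer example is elided by this hunk)::

    import attr

    @attr.s
    class Point:
        x = attr.ib()
        y = attr.ib(default=0)

    p = Point(1)
    print(p)                  # Point(x=1, y=0) -- __repr__ is generated for you
    print(p == Point(1, 0))   # True            -- and so is __eq__
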
@ -147,44 +149,18 @@ Which in turn encourages you to write *small classes* that do `one thing well <h
Never again violate the `single responsibility principle <https://en.wikipedia.org/wiki/Single_responsibility_principle>`_ just because implementing ``__init__`` et al is a painful drag.


- .. -testimonials-
+ .. -getting-help-

- Testimonials
- ============
-
- **Amber Hawkie Brown**, Twisted Release Manager and Computer Owl:
-
- Writing a fully-functional class using attrs takes me less time than writing this testimonial.
-
-
- **Glyph Lefkowitz**, creator of `Twisted <https://twistedmatrix.com/>`_, `Automat <https://pypi.org/project/Automat/>`_, and other open source software, in `The One Python Library Everyone Needs <https://glyph.twistedmatrix.com/2016/08/attrs.html>`_:
-
- I’m looking forward to is being able to program in Python-with-attrs everywhere.
- It exerts a subtle, but positive, design influence in all the codebases I’ve seen it used in.
-
-
- **Kenneth Reitz**, creator of `Requests <https://github.com/psf/requests>`_ (`on paper no less <https://twitter.com/hynek/status/866817877650751488>`_!):
-
- attrs—classes for humans. I like it.
-
-
- **Łukasz Langa**, creator of `Black <https://github.com/psf/black>`_, prolific Python core developer, and release manager for Python 3.8 and 3.9:
-
- I'm increasingly digging your attr.ocity. Good job!
-
-
- .. -end-
-
- .. -project-information-
-
Getting Help
============

Please use the ``python-attrs`` tag on `StackOverflow <https://stackoverflow.com/questions/tagged/python-attrs>`_ to get help.

- Answering questions of your fellow developers is also great way to help the project!
+ Answering questions of your fellow developers is also a great way to help the project!


+ .. -project-information-

Project Information
===================

@ -192,7 +168,7 @@ Project Information
its documentation lives at `Read the Docs <https://www.attrs.org/>`_,
the code on `GitHub <https://github.com/python-attrs/attrs>`_,
and the latest release on `PyPI <https://pypi.org/project/attrs/>`_.
- It’s rigorously tested on Python 2.7, 3.4+, and PyPy.
+ It’s rigorously tested on Python 2.7, 3.5+, and PyPy.

We collect information on **third-party extensions** in our `wiki <https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs>`_.
Feel free to browse and add your own!
@ -200,17 +176,53 @@ Feel free to browse and add your own!
If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide <https://www.attrs.org/en/latest/contributing.html>`_ to get you started!


+ ``attrs`` for Enterprise
+ ------------------------
+
+ Available as part of the Tidelift Subscription.
+
+ The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+ Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+ `Learn more. <https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_


Release Information
===================

- 19.3.0 (2019-10-15)
+ 20.3.0 (2020-11-05)
-------------------

+ Backward-incompatible Changes
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ - ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**.
+
+ This release does **not** change anything about them and they are already used widely in production though.
+
+ If you wish to use them together with mypy, you can simply drop `this plugin <https://gist.github.com/hynek/1e3844d0c99e479e716169034b5fa963#file-attrs_ng_plugin-py>`_ into your project.
+
+ Feel free to provide feedback to them in the linked issue #668.
+
+ We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled.
+ `#668 <https://github.com/python-attrs/attrs/issues/668>`_

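As a rough illustration of the provisional APIs named above (a sketch against attrs 20.3.0; the ``Server`` class and its values are invented here)::

    import attr

    # attr.define() is the provisional "next generation" class decorator and
    # attr.field() its counterpart to attr.ib(); both ship in 20.3.0 but, as
    # the note above says, they are still marked provisional.
    @attr.define
    class Server:
        host: str
        port: int = attr.field(default=25565)

    print(Server("mc.example.org"))   # Server(host='mc.example.org', port=25565)
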
Changes
^^^^^^^

- - Fixed ``auto_attribs`` usage when default values cannot be compared directly with ``==``, such as ``numpy`` arrays.
- `#585 <https://github.com/python-attrs/attrs/issues/585>`_
+ - ``attr.s()`` now has a *field_transformer* hook that is called for all ``Attribute``\ s and returns a (modified or updated) list of ``Attribute`` instances.
+ ``attr.asdict()`` has a *value_serializer* hook that can change the way values are converted.
+ Both hooks are meant to help with data (de-)serialization workflows.
+ `#653 <https://github.com/python-attrs/attrs/issues/653>`_
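A rough sketch of how those two hooks fit together (the ``Event`` class and both callback names are invented for the example)::

    import datetime

    import attr

    def log_fields(cls, fields):
        # field_transformer(cls, fields) runs at class-creation time and must
        # return the (possibly modified) list of Attribute instances.
        print("building", cls.__name__, [f.name for f in fields])
        return fields

    def serialize(inst, field, value):
        # value_serializer(instance, field, value) can substitute a value
        # while attr.asdict() walks the instance.
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        return value

    @attr.s(field_transformer=log_fields)
    class Event:
        happened_at = attr.ib()

    event = Event(datetime.datetime(2020, 11, 5, 12, 0))
    print(attr.asdict(event, value_serializer=serialize))
    # {'happened_at': '2020-11-05T12:00:00'}
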
+ - ``kw_only=True`` now works on Python 2.
+ `#700 <https://github.com/python-attrs/attrs/issues/700>`_
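For reference, ``kw_only=True`` forces an attribute to be passed by keyword; a minimal sketch (the ``Job`` class is invented here)::

    import attr

    @attr.s
    class Job:
        name = attr.ib()
        retries = attr.ib(default=3, kw_only=True)

    print(Job("backup", retries=5))   # Job(name='backup', retries=5)
    # Job("backup", 5) would raise a TypeError: retries is keyword-only.
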
+ - ``raise from`` now works on frozen classes on PyPy.
+ `#703 <https://github.com/python-attrs/attrs/issues/703>`_,
+ `#712 <https://github.com/python-attrs/attrs/issues/712>`_
+ - ``attr.asdict()`` and ``attr.astuple()`` now treat ``frozenset``\ s like ``set``\ s with regards to the *retain_collection_types* argument.
+ `#704 <https://github.com/python-attrs/attrs/issues/704>`_
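And the *retain_collection_types* behaviour mentioned in that entry, sketched with an invented class::

    import attr

    @attr.s
    class Bundle:
        tags = attr.ib()

    b = Bundle(frozenset({"a", "b"}))
    print(attr.asdict(b))                                 # the frozenset is dumped as a list
    print(attr.asdict(b, retain_collection_types=True))   # it stays a frozenset
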
+ - The type stubs for ``attr.s()`` and ``attr.make_class()`` are not missing the *collect_by_mro* argument anymore.
+ `#711 <https://github.com/python-attrs/attrs/issues/711>`_

`Full changelog <https://www.attrs.org/en/stable/changelog.html>`_.

39
venv/Lib/site-packages/attrs-20.3.0.dist-info/RECORD
Normal file
@ -0,0 +1,39 @@
+ attr/__init__.py,sha256=70KmZOgz2sUvtRTC_IuXEeN2ttOyBWHn4XA59aqGXPs,1568
+ attr/__init__.pyi,sha256=ca_4sg7z0e_EL7ehy-flXVGAju5PBX2hVo51dUmPMi0,12986
+ attr/__pycache__/__init__.cpython-36.pyc,,
+ attr/__pycache__/_compat.cpython-36.pyc,,
+ attr/__pycache__/_config.cpython-36.pyc,,
+ attr/__pycache__/_funcs.cpython-36.pyc,,
+ attr/__pycache__/_make.cpython-36.pyc,,
+ attr/__pycache__/_next_gen.cpython-36.pyc,,
+ attr/__pycache__/_version_info.cpython-36.pyc,,
+ attr/__pycache__/converters.cpython-36.pyc,,
+ attr/__pycache__/exceptions.cpython-36.pyc,,
+ attr/__pycache__/filters.cpython-36.pyc,,
+ attr/__pycache__/setters.cpython-36.pyc,,
+ attr/__pycache__/validators.cpython-36.pyc,,
+ attr/_compat.py,sha256=rZhpP09xbyWSzMv796XQbryIr21oReJFvA70G3lrHxg,7308
+ attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514
+ attr/_funcs.py,sha256=PvFQlflEswO_qIR2sUr4a4x8ggQpEoDKe3YKM2rLJu4,13081
+ attr/_make.py,sha256=61XB4-SHQpFbWbStGWotTTbzVT2m49DUovRgnxpMqmU,88313
+ attr/_next_gen.py,sha256=x6TU2rVOXmFmrNNvkfshJsxyRbAAK0wDI4SJV2OI97c,4138
+ attr/_version_info.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162
+ attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
+ attr/converters.py,sha256=CaK6iLtEMmemrqU8LQ1D2nWtbo9dGPAv4UaZ0rFzhOA,2214
+ attr/converters.pyi,sha256=fVGSfawF3NMy2EBApkC7dAwMuujWCHnGEnnAgsbkVpg,380
+ attr/exceptions.py,sha256=gmlET97ikqdQVvy7Ff9p7zVvqc2SsNtTd-r30pva1GE,1950
+ attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
+ attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098
+ attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214
+ attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ attr/setters.py,sha256=0ElzHwdVK3dsYcQi2CXkFvhx8fNxUI5OVhw8SWeaKmA,1434
+ attr/setters.pyi,sha256=SYr6adhx4f0dSkmmBICg6eK8WMev5jT-KJQJTdul078,567
+ attr/validators.py,sha256=6DBx1jt4oZxx1ppvx6JWqm9-UAsYpXC4HTwxJilCeRg,11497
+ attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868
+ attrs-20.3.0.dist-info/AUTHORS.rst,sha256=wsqCNbGz_mklcJrt54APIZHZpoTIJLkXqEhhn4Nd8hc,752
+ attrs-20.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ attrs-20.3.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082
+ attrs-20.3.0.dist-info/METADATA,sha256=2XTmALrRRbIZj9J8pJgpKYnyATu_NAL8vfUnqRFpE5w,10220
+ attrs-20.3.0.dist-info/RECORD,,
+ attrs-20.3.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+ attrs-20.3.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5
@ -1,5 +1,5 @@
Wheel-Version: 1.0
- Generator: bdist_wheel (0.32.2)
+ Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
@ -1,26 +0,0 @@
-
- Except when otherwise stated (look for LICENSE files in directories or
- information at the beginning of each file) all software and
- documentation is licensed as follows:
-
- The MIT License
-
- Permission is hereby granted, free of charge, to any person
- obtaining a copy of this software and associated documentation
- files (the "Software"), to deal in the Software without
- restriction, including without limitation the rights to use,
- copy, modify, merge, publish, distribute, sublicense, and/or
- sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included
- in all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
- THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
-
@ -1,37 +0,0 @@
- Metadata-Version: 2.1
- Name: cffi
- Version: 1.14.0
- Summary: Foreign Function Interface for Python calling C code.
- Home-page: http://cffi.readthedocs.org
- Author: Armin Rigo, Maciej Fijalkowski
- Author-email: python-cffi@googlegroups.com
- License: MIT
- Platform: UNKNOWN
- Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 2
- Classifier: Programming Language :: Python :: 2.6
- Classifier: Programming Language :: Python :: 2.7
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.2
- Classifier: Programming Language :: Python :: 3.3
- Classifier: Programming Language :: Python :: 3.4
- Classifier: Programming Language :: Python :: 3.5
- Classifier: Programming Language :: Python :: 3.6
- Classifier: Programming Language :: Python :: Implementation :: CPython
- Classifier: Programming Language :: Python :: Implementation :: PyPy
- Classifier: License :: OSI Approved :: MIT License
- Requires-Dist: pycparser
-
-
- CFFI
- ====
-
- Foreign Function Interface for Python calling C code.
- Please see the `Documentation <http://cffi.readthedocs.org/>`_.
-
- Contact
- -------
-
- `Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
-
-
@ -1,44 +0,0 @@
- _cffi_backend.cp36-win_amd64.pyd,sha256=Fl9fNCxcVgweZH6L64L1BE1akXg3VPw4uvmSXtUuKQ0,180736
- cffi-1.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
- cffi-1.14.0.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320
- cffi-1.14.0.dist-info/METADATA,sha256=tZUQbfePzTlM58vXdkcDc1ID5OsPswm6VNj3Ds5VzRU,1191
- cffi-1.14.0.dist-info/RECORD,,
- cffi-1.14.0.dist-info/WHEEL,sha256=Mp7cNSJkwOccBZaxEtyYhlze1HXRCLLX3tIlVLTix2E,106
- cffi-1.14.0.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
- cffi-1.14.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
- cffi/__init__.py,sha256=rt03V1G6kc9Hk9MFKivO7NokqmXzEGBtiEVMEoD3erk,527
- cffi/__pycache__/__init__.cpython-36.pyc,,
- cffi/__pycache__/api.cpython-36.pyc,,
- cffi/__pycache__/backend_ctypes.cpython-36.pyc,,
- cffi/__pycache__/cffi_opcode.cpython-36.pyc,,
- cffi/__pycache__/commontypes.cpython-36.pyc,,
- cffi/__pycache__/cparser.cpython-36.pyc,,
- cffi/__pycache__/error.cpython-36.pyc,,
- cffi/__pycache__/ffiplatform.cpython-36.pyc,,
- cffi/__pycache__/lock.cpython-36.pyc,,
- cffi/__pycache__/model.cpython-36.pyc,,
- cffi/__pycache__/pkgconfig.cpython-36.pyc,,
- cffi/__pycache__/recompiler.cpython-36.pyc,,
- cffi/__pycache__/setuptools_ext.cpython-36.pyc,,
- cffi/__pycache__/vengine_cpy.cpython-36.pyc,,
- cffi/__pycache__/vengine_gen.cpython-36.pyc,,
- cffi/__pycache__/verifier.cpython-36.pyc,,
- cffi/_cffi_errors.h,sha256=INd0GxZQna8TTRYNOOr9_iFy0FZa84I_KH1qlmPgulQ,4003
- cffi/_cffi_include.h,sha256=SZmeKYOZhH9yEe9YQko2yqFar_GPhzlbbZXIQdKvPxg,13838
- cffi/_embedding.h,sha256=ESJSMS67PytRAu1ZQsNpYYOpAKzwSO4H9yR6vQBDVK0,17931
- cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029
- cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575
- cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911
- cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769
- cffi/cparser.py,sha256=Jbm7NYmhQky0W6_VvYy-HLZnv3kdrKCw5HkpqLAijH4,43034
- cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908
- cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173
- cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777
- cffi/model.py,sha256=m6ExPe_NyemlW1cPV-RkptNv9MDQGEey9MDQAoOu-XQ,22296
- cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157
- cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495
- cffi/recompiler.py,sha256=G3SqywHi2evVNV_nkHbNmbtHqg-WHLgBn8Silyo028o,64769
- cffi/setuptools_ext.py,sha256=gtA1z8ujS8As3ysDUFrN4rNzw-02_M9kRrQveVNzfW0,9065
- cffi/vengine_cpy.py,sha256=a-0zG9XdGF0aEkH01KeGLszETCGNt-PCuI6II-8TxBg,44390
- cffi/vengine_gen.py,sha256=ZR3d1bEpeAM123Xev9SZeL8pYHdghYD3f3wpk2idr64,27351
- cffi/verifier.py,sha256=La8rdbEkvdvbqAHDzTk5lsNUvdkqB_GcFnO7wXI6Mgk,11513
@ -1,3 +0,0 @@
- [distutils.setup_keywords]
- cffi_modules = cffi.setuptools_ext:cffi_modules
-
@ -1,2 +0,0 @@
- _cffi_backend
- cffi
Some files were not shown because too many files have changed in this diff.