McRoll now recognizes permanent=1 as non-rollable

officereso 2021-03-27 17:44:21 -05:00
parent dbebf53707
commit 4adb188d29
629 changed files with 277595 additions and 10 deletions

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
<changelist name="Uncommitted_changes_before_Update_at_3_27_2021_4_05_PM_[Default_Changelist]" date="1616879178722" recycled="true" deleted="true">
<option name="PATH" value="$PROJECT_DIR$/.idea/shelf/Uncommitted_changes_before_Update_at_3_27_2021_4_05_PM_[Default_Changelist]/shelved.patch" />
<option name="DESCRIPTION" value="Uncommitted changes before Update at 3/27/2021 4:05 PM [Default Changelist]" />
</changelist>

View File

@@ -12,7 +12,7 @@ password = open("../sqlPass.txt", 'r').read()
scheduler = AsyncIOScheduler({
'apscheduler.jobstores.default': {
'type': 'sqlalchemy',
'url': f'mysql+pymysql://discord_bot:tO81#1WAx&JM@192.168.1.52:5618/discord_bot?charset=utf8mb4',
'url': f'mysql+pymysql://quentin:{password}@192.168.1.52:5618/discord_bot?charset=utf8mb4',
},
'apscheduler.job_defaults.coalesce': 'True',
'apscheduler.timezone': 'America/Chicago'
@@ -26,9 +26,9 @@ mcSelf = None
def get_con() -> pymysql.Connection:
return pymysql.connect(host='192.168.1.52',
port=5618,
user='discord_bot',
user='quentin',
password=f'{password}',
db='mc',
db='minecraft',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
@@ -58,7 +58,7 @@ def reaction_to_serverip(reaction: Union[discord.Reaction, str]) -> int:
if isinstance(reaction.emoji, discord.Emoji) else reaction.emoji.encode('unicode_escape').decode('utf-8')
with con.cursor() as cursor:
cursor.execute("SELECT serverIP FROM mc.server_list WHERE reaction=%s;", reaction)
cursor.execute("SELECT serverIP FROM minecraft.server_list WHERE reaction=%s;", reaction)
serverIp = cursor.fetchone()
con.close()
return serverIp['serverIP']
@@ -69,7 +69,7 @@ def get_choices() -> dict:
choices = {}
with con.cursor() as cursor:
cursor.execute("SELECT serverIP, serverName, lastActivated, reaction, getEmoji, onlyServer "
"FROM mc.server_list WHERE lastActivated is not null")
"FROM minecraft.server_list WHERE lastActivated is not null and permanent=0")
row = cursor.fetchone()
while row is not None:
row['votes'] = 0
@@ -123,7 +123,7 @@ class McRoll(commands.Cog):
async def main_poll_recursion(self, choices, channelId):
self.pollMessageId = (await self.poll(choices, channelId)).id
scheduler.add_job(get_poll_results, 'date',
run_date=(datetime.now() + timedelta(days=1)),
run_date=(datetime.now() + timedelta(seconds=10)),
args=[choices, self.pollMessageId, channelId],
coalesce=True)
@@ -153,10 +153,14 @@
timestamp=(datetime.utcnow() + timedelta(days=1)))
for choice in choices.values():
embed.add_field(
name=choice['serverName'] + ' ' + str(self.client.get_emoji(int(choice['reaction'])))
if choice['getEmoji']
else bytes(choice['reaction'], "utf-8").decode("unicode_escape"),
value="Multiplier : " + str(round(2 * math.log10(choice['lastActivated'] + 1) + 1, 2)) + '\n' + "Last Rolled : " + str(choice['lastActivated'] * 2) + " weeks ago.") + "\nThis is an only server, meaning if it wins it will be the only winner" if choice['onlyServer'] else ""
name=(choice['serverName'] + ' ' +
(str(self.client.get_emoji(int(choice['reaction'])))
if choice['getEmoji']
else bytes(choice['reaction'], "utf-8").decode("unicode_escape"))),
value=("Multiplier : " + str(round(2 * math.log10(choice['lastActivated'] + 1) + 1, 2)) +
'\n' + "Last Rolled : " + str(choice['lastActivated'] * 2) + " weeks ago." +
("\nThis is an only server, meaning if it wins it will be the only winner" if choice['onlyServer']
else " ")))
pollMessage = await channel.send(embed=embed)
await channel.send('@everyone')
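For reference, a minimal standalone sketch of the query this commit changes (it assumes the same minecraft.server_list schema and sqlPass.txt credential file used above): rows flagged permanent=1 are filtered out, so permanent servers never enter the roll.

import pymysql.cursors

# Sketch only: mirrors get_choices() after this commit; any server marked
# permanent=1 is excluded from the candidates returned for the poll.
password = open("../sqlPass.txt", "r").read()
con = pymysql.connect(host="192.168.1.52", port=5618,
                      user="quentin", password=password,
                      db="minecraft", charset="utf8mb4",
                      cursorclass=pymysql.cursors.DictCursor)
with con.cursor() as cursor:
    cursor.execute("SELECT serverIP, serverName, lastActivated, reaction, getEmoji, onlyServer "
                   "FROM minecraft.server_list "
                   "WHERE lastActivated IS NOT NULL AND permanent=0")
    rollable = cursor.fetchall()   # permanent servers are not in this list
con.close()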

View File

@@ -0,0 +1,143 @@
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
typedef struct _greenlet {
PyObject_HEAD
char* stack_start;
char* stack_stop;
char* stack_copy;
intptr_t stack_saved;
struct _greenlet* stack_prev;
struct _greenlet* parent;
PyObject* run_info;
struct _frame* top_frame;
int recursion_depth;
PyObject* weakreflist;
#if PY_VERSION_HEX >= 0x030700A3
_PyErr_StackItem* exc_info;
_PyErr_StackItem exc_state;
#else
PyObject* exc_type;
PyObject* exc_value;
PyObject* exc_traceback;
#endif
PyObject* dict;
#if PY_VERSION_HEX >= 0x030700A3
PyObject* context;
#endif
} PyGreenlet;
#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type)
#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*)-1)
#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL)
#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL)
#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent)
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 8
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#ifndef GREENLET_MODULE
/* This section is used by modules that uses the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
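The header above exposes to C extensions the same primitives that greenlet offers from Python as greenlet.greenlet(run, parent), getcurrent(), switch() and throw(). A minimal Python sketch of that surface (illustrative only, not part of this commit):

from greenlet import greenlet, getcurrent

def worker():
    print("in worker, parent is", getcurrent().parent)
    main.switch("hello")        # jump back to the main greenlet
    print("worker resumed")

main = getcurrent()
g = greenlet(worker)            # corresponds to PyGreenlet_New(run, parent)
value = g.switch()              # corresponds to PyGreenlet_Switch(g, args, kwargs)
print("worker sent:", value)    # -> "hello"
g.switch()                      # resume worker until it finishes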

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,19 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
Copyright (c) Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,140 @@
Metadata-Version: 2.1
Name: APScheduler
Version: 3.7.0
Summary: In-process task scheduler with Cron-like capabilities
Home-page: https://github.com/agronholm/apscheduler
Author: Alex Grönholm
Author-email: apscheduler@nextday.fi
License: MIT
Keywords: scheduling cron
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
Requires-Dist: setuptools (>=0.7)
Requires-Dist: six (>=1.4.0)
Requires-Dist: pytz
Requires-Dist: tzlocal (~=2.0)
Requires-Dist: funcsigs ; python_version < "3.5"
Requires-Dist: futures ; python_version == "2.7"
Provides-Extra: asyncio
Requires-Dist: trollius ; (python_version == "2.7") and extra == 'asyncio'
Provides-Extra: doc
Requires-Dist: sphinx ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
Provides-Extra: gevent
Requires-Dist: gevent ; extra == 'gevent'
Provides-Extra: mongodb
Requires-Dist: pymongo (>=3.0) ; extra == 'mongodb'
Provides-Extra: redis
Requires-Dist: redis (>=3.0) ; extra == 'redis'
Provides-Extra: rethinkdb
Requires-Dist: rethinkdb (>=2.4.0) ; extra == 'rethinkdb'
Provides-Extra: sqlalchemy
Requires-Dist: sqlalchemy (>=0.8) ; extra == 'sqlalchemy'
Provides-Extra: testing
Requires-Dist: pytest (<6) ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: pytest-tornado5 ; extra == 'testing'
Requires-Dist: mock ; (python_version == "2.7") and extra == 'testing'
Requires-Dist: pytest-asyncio (<0.6) ; (python_version == "3.4") and extra == 'testing'
Requires-Dist: pytest-asyncio ; (python_version >= "3.5") and extra == 'testing'
Provides-Extra: tornado
Requires-Dist: tornado (>=4.3) ; extra == 'tornado'
Provides-Extra: twisted
Requires-Dist: twisted ; extra == 'twisted'
Provides-Extra: zookeeper
Requires-Dist: kazoo ; extra == 'zookeeper'
.. image:: https://travis-ci.com/agronholm/apscheduler.svg?branch=master
:target: https://travis-ci.com/agronholm/apscheduler
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/apscheduler/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/apscheduler?branch=master
:alt: Code Coverage
Advanced Python Scheduler (APScheduler) is a Python library that lets you schedule your Python code
to be executed later, either just once or periodically. You can add new jobs or remove old ones on
the fly as you please. If you store your jobs in a database, they will also survive scheduler
restarts and maintain their state. When the scheduler is restarted, it will then run all the jobs
it should have run while it was offline [#f1]_.
Among other things, APScheduler can be used as a cross-platform, application specific replacement
to platform specific schedulers, such as the cron daemon or the Windows task scheduler. Please
note, however, that APScheduler is **not** a daemon or service itself, nor does it come with any
command line tools. It is primarily meant to be run inside existing applications. That said,
APScheduler does provide some building blocks for you to build a scheduler service or to run a
dedicated scheduler process.
APScheduler has three built-in scheduling systems you can use:
* Cron-style scheduling (with optional start/end times)
* Interval-based execution (runs jobs on even intervals, with optional start/end times)
* One-off delayed execution (runs jobs once, on a set date/time)
You can mix and match scheduling systems and the backends where the jobs are stored any way you
like. Supported backends for storing jobs include:
* Memory
* `SQLAlchemy <http://www.sqlalchemy.org/>`_ (any RDBMS supported by SQLAlchemy works)
* `MongoDB <http://www.mongodb.org/>`_
* `Redis <http://redis.io/>`_
* `RethinkDB <https://www.rethinkdb.com/>`_
* `ZooKeeper <https://zookeeper.apache.org/>`_
APScheduler also integrates with several common Python frameworks, like:
* `asyncio <http://docs.python.org/3.4/library/asyncio.html>`_ (:pep:`3156`)
* `gevent <http://www.gevent.org/>`_
* `Tornado <http://www.tornadoweb.org/>`_
* `Twisted <http://twistedmatrix.com/>`_
* `Qt <http://qt-project.org/>`_ (using either
`PyQt <http://www.riverbankcomputing.com/software/pyqt/intro>`_ ,
`PySide2 <https://wiki.qt.io/Qt_for_Python>`_ or
`PySide <http://qt-project.org/wiki/PySide>`_)
There are third party solutions for integrating APScheduler with other frameworks:
* `Django <https://github.com/jarekwg/django-apscheduler>`_
* `Flask <https://github.com/viniciuschiele/flask-apscheduler>`_
.. [#f1] The cutoff period for this is also configurable.
Documentation
-------------
Documentation can be found `here <http://readthedocs.org/docs/apscheduler/en/latest/>`_.
Source
------
The source can be browsed at `Github <https://github.com/agronholm/apscheduler>`_.
Reporting bugs
--------------
A `bug tracker <https://github.com/agronholm/apscheduler/issues>`_ is provided by Github.
Getting help
------------
If you have problems or other questions, you can either:
* Ask in the `apscheduler <https://gitter.im/apscheduler/Lobby>`_ room on Gitter
* Ask on the `APScheduler Google group <http://groups.google.com/group/apscheduler>`_, or
* Ask on `StackOverflow <http://stackoverflow.com/questions/tagged/apscheduler>`_ and tag your
question with the ``apscheduler`` tag
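A minimal, self-contained sketch of the scheduling systems described above. It uses BlockingScheduler (an assumption, chosen so it runs standalone) instead of the bot's AsyncIOScheduler, but the 'date'/'interval' triggers and the coalesce option are the same ones the bot's add_job call relies on:

from datetime import datetime, timedelta
from apscheduler.schedulers.blocking import BlockingScheduler

def tick():
    print("tick", datetime.now())

sched = BlockingScheduler(timezone="America/Chicago")
sched.add_job(tick, "interval", seconds=5)                      # recurring job
sched.add_job(tick, "date",
              run_date=datetime.now() + timedelta(seconds=10),  # one-off delayed job
              coalesce=True)
sched.start()   # blocks; Ctrl+C to stop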

View File

@@ -0,0 +1,84 @@
APScheduler-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
APScheduler-3.7.0.dist-info/LICENSE.txt,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130
APScheduler-3.7.0.dist-info/METADATA,sha256=nv0HEv4xOwQh-OD13duoBbl8yiRqrg2egM15MAbpoRk,5685
APScheduler-3.7.0.dist-info/RECORD,,
APScheduler-3.7.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
APScheduler-3.7.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
APScheduler-3.7.0.dist-info/entry_points.txt,sha256=7RgkYN_OYyCUQtIGhj-UNcelnIjsNm7nC9rogdMQh3U,1148
APScheduler-3.7.0.dist-info/top_level.txt,sha256=O3oMCWxG-AHkecUoO6Ze7-yYjWrttL95uHO8-RFdYvE,12
apscheduler/__init__.py,sha256=qFEK2ysRBcLiYmm3deyJJ1avUOugaM_nCGHMD42WMBw,380
apscheduler/__pycache__/__init__.cpython-36.pyc,,
apscheduler/__pycache__/events.cpython-36.pyc,,
apscheduler/__pycache__/job.cpython-36.pyc,,
apscheduler/__pycache__/util.cpython-36.pyc,,
apscheduler/events.py,sha256=KRMTDQUS6d2uVnrQvPoz3ZPV5V9XKsCAZLsgx913FFo,3593
apscheduler/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/executors/__pycache__/__init__.cpython-36.pyc,,
apscheduler/executors/__pycache__/asyncio.cpython-36.pyc,,
apscheduler/executors/__pycache__/base.cpython-36.pyc,,
apscheduler/executors/__pycache__/base_py3.cpython-36.pyc,,
apscheduler/executors/__pycache__/debug.cpython-36.pyc,,
apscheduler/executors/__pycache__/gevent.cpython-36.pyc,,
apscheduler/executors/__pycache__/pool.cpython-36.pyc,,
apscheduler/executors/__pycache__/tornado.cpython-36.pyc,,
apscheduler/executors/__pycache__/twisted.cpython-36.pyc,,
apscheduler/executors/asyncio.py,sha256=ji5f6Qm2uGhov-3w52CXHZi8jc5U_gS56lisQylKTBQ,2087
apscheduler/executors/base.py,sha256=hogiMc_t-huw6BMod0HEeY2FhRNmAAUyNNuBHvIX31M,5336
apscheduler/executors/base_py3.py,sha256=8WOpTeX1NA-spdbEQ1oJMh5T2O_t2UdsaSnAh-iEWe0,1831
apscheduler/executors/debug.py,sha256=15_ogSBzl8RRCfBYDnkIV2uMH8cLk1KImYmBa_NVGpc,573
apscheduler/executors/gevent.py,sha256=aulrNmoefyBgrOkH9awRhFiXIDnSCnZ4U0o0_JXIXgc,777
apscheduler/executors/pool.py,sha256=6FEUTMl3WCRKebTqwCvpl7B2L9y0GYH16ULs4y_VNJg,2107
apscheduler/executors/tornado.py,sha256=DU75VaQ9R6nBuy8lbPUvDKUgsuJcZqwAvURC5vg3r6w,1780
apscheduler/executors/twisted.py,sha256=bRoU0C4BoVcS6_BjKD5wfUs0IJpGkmLsRAcMH2rJJss,778
apscheduler/job.py,sha256=JCRERBpfWLuomPiNNHX-jrluEwfHkdscEmz4i0Y8rao,11216
apscheduler/jobstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/jobstores/__pycache__/__init__.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/base.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/memory.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/mongodb.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/redis.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/rethinkdb.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/sqlalchemy.cpython-36.pyc,,
apscheduler/jobstores/__pycache__/zookeeper.cpython-36.pyc,,
apscheduler/jobstores/base.py,sha256=DXzSW9XscueHZHMvy1qFiG-vYqUl_MMv0n0uBSZWXGo,4523
apscheduler/jobstores/memory.py,sha256=ZxWiKsqfsCHFvac-6X9BztuhnuSxlOYi1dhT6g-pjQo,3655
apscheduler/jobstores/mongodb.py,sha256=JI6t3PanXsxHjj6fzxMdzcZBy5ytV84ZPU_WWB5zEA4,5335
apscheduler/jobstores/redis.py,sha256=kjQDIzPXz-Yq976U9HK3aMkcCI_QRLKgTADQWKewtik,5483
apscheduler/jobstores/rethinkdb.py,sha256=k1rSLYJqejuhQxJY3pXwHAQYcpZ1QFJsoQ8n0oEu5MM,5863
apscheduler/jobstores/sqlalchemy.py,sha256=alfkAEIzwSJKbYYXGc4G8DTeBsLdGhtac8ebjYvoVT0,6134
apscheduler/jobstores/zookeeper.py,sha256=avGLXaJGjHD0F7uG6rLJ2gg_TXNqXDEM4PqOu56f-Xg,6363
apscheduler/schedulers/__init__.py,sha256=jM63xA_K7GSToBenhsz-SCcqfhk1pdEVb6ajwoO5Kqg,406
apscheduler/schedulers/__pycache__/__init__.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/asyncio.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/background.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/base.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/blocking.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/gevent.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/qt.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/tornado.cpython-36.pyc,,
apscheduler/schedulers/__pycache__/twisted.cpython-36.pyc,,
apscheduler/schedulers/asyncio.py,sha256=BuSE8re61ytQtUNRw197iJdhOepaIexr3-WYEDAjhR0,2230
apscheduler/schedulers/background.py,sha256=751p-f5Di6pY4x6UXlZggpxQ5k2ObJ_Q5wSeWmKHS8o,1566
apscheduler/schedulers/base.py,sha256=gvC0rjzSljDPxn0iGu-DyCy-9Fh48NJoY4-VeoZzDZo,43237
apscheduler/schedulers/blocking.py,sha256=8nubfJ4PoUnAkEY6WRQG4COzG4SxGyW9PjuVPhDAbsk,985
apscheduler/schedulers/gevent.py,sha256=csPBvV75FGcboXXsdex6fCD7J54QgBddYNdWj62ZO9g,1031
apscheduler/schedulers/qt.py,sha256=vH2ZHL0PkFTcnHfnsU_tnI3u7bVQ81yy2v_is-575jQ,1463
apscheduler/schedulers/tornado.py,sha256=D9Vaq3Ee9EFiXa1jDy9tedI048gR_YT_LAFUWqO_uEw,1926
apscheduler/schedulers/twisted.py,sha256=D5EBjjMRtMBxy0_aAURcULAI8Ky2IvCTr9tK9sO1rYk,1844
apscheduler/triggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
apscheduler/triggers/__pycache__/__init__.cpython-36.pyc,,
apscheduler/triggers/__pycache__/base.cpython-36.pyc,,
apscheduler/triggers/__pycache__/combining.cpython-36.pyc,,
apscheduler/triggers/__pycache__/date.cpython-36.pyc,,
apscheduler/triggers/__pycache__/interval.cpython-36.pyc,,
apscheduler/triggers/base.py,sha256=BvBJdOnIeVClXPXeInzYK25cN64jAc4a9IiEQucSiVk,1355
apscheduler/triggers/combining.py,sha256=klaSoBp1kyrPX5D3gBpNTlsGKjks5QeKPW5JN_MVs30,3449
apscheduler/triggers/cron/__init__.py,sha256=XVdyymFsnXQMw57gpU1M0vmJo5oGdQ_t8BAkhEFNMFA,9236
apscheduler/triggers/cron/__pycache__/__init__.cpython-36.pyc,,
apscheduler/triggers/cron/__pycache__/expressions.cpython-36.pyc,,
apscheduler/triggers/cron/__pycache__/fields.cpython-36.pyc,,
apscheduler/triggers/cron/expressions.py,sha256=hu1kq0mKvivIw7U0D0Nnrbuk3q01dCuhZ7SHRPw6qhI,9184
apscheduler/triggers/cron/fields.py,sha256=NWPClh1NgSOpTlJ3sm1TXM_ViC2qJGKWkd_vg0xsw7o,3510
apscheduler/triggers/date.py,sha256=RrfB1PNO9G9e91p1BOf-y_TseVHQQR-KJPhNdPpAHcU,1705
apscheduler/triggers/interval.py,sha256=M4y6APgeP9DEh_XKHbFcaUKFk7wAb0-XoSO50bR4qXc,4369
apscheduler/util.py,sha256=3gKKUbW2fyEa5LkYCGHnaWSk2jqj_HreQbE4SrJiDxc,14087

View File

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View File

@@ -0,0 +1,24 @@
[apscheduler.executors]
asyncio = apscheduler.executors.asyncio:AsyncIOExecutor [asyncio]
debug = apscheduler.executors.debug:DebugExecutor
gevent = apscheduler.executors.gevent:GeventExecutor [gevent]
processpool = apscheduler.executors.pool:ProcessPoolExecutor
threadpool = apscheduler.executors.pool:ThreadPoolExecutor
tornado = apscheduler.executors.tornado:TornadoExecutor [tornado]
twisted = apscheduler.executors.twisted:TwistedExecutor [twisted]
[apscheduler.jobstores]
memory = apscheduler.jobstores.memory:MemoryJobStore
mongodb = apscheduler.jobstores.mongodb:MongoDBJobStore [mongodb]
redis = apscheduler.jobstores.redis:RedisJobStore [redis]
rethinkdb = apscheduler.jobstores.rethinkdb:RethinkDBJobStore [rethinkdb]
sqlalchemy = apscheduler.jobstores.sqlalchemy:SQLAlchemyJobStore [sqlalchemy]
zookeeper = apscheduler.jobstores.zookeeper:ZooKeeperJobStore [zookeeper]
[apscheduler.triggers]
and = apscheduler.triggers.combining:AndTrigger
cron = apscheduler.triggers.cron:CronTrigger
date = apscheduler.triggers.date:DateTrigger
interval = apscheduler.triggers.interval:IntervalTrigger
or = apscheduler.triggers.combining:OrTrigger

View File

@@ -0,0 +1 @@
apscheduler

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,19 @@
Copyright (c) 2010, 2013 PyMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,180 @@
Metadata-Version: 2.1
Name: PyMySQL
Version: 1.0.2
Summary: Pure Python MySQL Driver
Home-page: https://github.com/PyMySQL/PyMySQL/
Author: yutaka.matsubara
Author-email: yutaka.matsubara@gmail.com
Maintainer: Inada Naoki
Maintainer-email: songofacandy@gmail.com
License: "MIT"
Project-URL: Documentation, https://pymysql.readthedocs.io/
Keywords: MySQL
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Database
Requires-Python: >=3.6
Provides-Extra: ed25519
Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
Provides-Extra: rsa
Requires-Dist: cryptography ; extra == 'rsa'
.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
:target: https://pymysql.readthedocs.io/
:alt: Documentation Status
.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
.. image:: https://img.shields.io/lgtm/grade/python/g/PyMySQL/PyMySQL.svg?logo=lgtm&logoWidth=18
:target: https://lgtm.com/projects/g/PyMySQL/PyMySQL/context:python
PyMySQL
=======
.. contents:: Table of Contents
:local:
This package contains a pure-Python MySQL client library, based on `PEP 249`_.
Most public APIs are compatible with mysqlclient and MySQLdb.
NOTE: PyMySQL doesn't support low level APIs `_mysql` provides like `data_seek`,
`store_result`, and `use_result`. You should use high level APIs defined in `PEP 249`_.
But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover
their usecase.
.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
Requirements
-------------
* Python -- one of the following:
- CPython_ : 3.6 and newer
- PyPy_ : Latest 3.x version
* MySQL Server -- one of the following:
- MySQL_ >= 5.6
- MariaDB_ >= 10.0
.. _CPython: https://www.python.org/
.. _PyPy: https://pypy.org/
.. _MySQL: https://www.mysql.com/
.. _MariaDB: https://mariadb.org/
Installation
------------
Package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.
You can install it with pip::
$ python3 -m pip install PyMySQL
To use "sha256_password" or "caching_sha2_password" for authenticate,
you need to install additional dependency::
$ python3 -m pip install PyMySQL[rsa]
To use MariaDB's "ed25519" authentication method, you need to install
additional dependency::
$ python3 -m pip install PyMySQL[ed25519]
Documentation
-------------
Documentation is available online: https://pymysql.readthedocs.io/
For support, please refer to the `StackOverflow
<https://stackoverflow.com/questions/tagged/pymysql>`_.
Example
-------
The following examples make use of a simple table
.. code:: sql
CREATE TABLE `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(255) COLLATE utf8_bin NOT NULL,
`password` varchar(255) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
AUTO_INCREMENT=1 ;
.. code:: python
import pymysql.cursors
# Connect to the database
connection = pymysql.connect(host='localhost',
user='user',
password='passwd',
database='db',
cursorclass=pymysql.cursors.DictCursor)
with connection:
with connection.cursor() as cursor:
# Create a new record
sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
with connection.cursor() as cursor:
# Read a single record
sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
cursor.execute(sql, ('webmaster@python.org',))
result = cursor.fetchone()
print(result)
This example will print:
.. code:: python
{'password': 'very-secret', 'id': 1}
Resources
---------
* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
* MySQL Reference Manuals: https://dev.mysql.com/doc/
* MySQL client/server protocol:
https://dev.mysql.com/doc/internals/en/client-server-protocol.html
* "Connector" channel in MySQL Community Slack:
https://lefred.be/mysql-community-on-slack/
* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
License
-------
PyMySQL is released under the MIT License. See LICENSE for more information.

View File

@@ -0,0 +1,43 @@
PyMySQL-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyMySQL-1.0.2.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
PyMySQL-1.0.2.dist-info/METADATA,sha256=hz4Fdo8sOFKcNqZ8wp4Bp-txNCOBCnw9-leYR7QBZ5I,5119
PyMySQL-1.0.2.dist-info/RECORD,,
PyMySQL-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyMySQL-1.0.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
PyMySQL-1.0.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
pymysql/__init__.py,sha256=XL7skPUK4cbKiek68T0vMob-L4YkIRLb2KX4hdMZVvM,4391
pymysql/__pycache__/__init__.cpython-36.pyc,,
pymysql/__pycache__/_auth.cpython-36.pyc,,
pymysql/__pycache__/charset.cpython-36.pyc,,
pymysql/__pycache__/connections.cpython-36.pyc,,
pymysql/__pycache__/converters.cpython-36.pyc,,
pymysql/__pycache__/cursors.cpython-36.pyc,,
pymysql/__pycache__/err.cpython-36.pyc,,
pymysql/__pycache__/optionfile.cpython-36.pyc,,
pymysql/__pycache__/protocol.cpython-36.pyc,,
pymysql/__pycache__/times.cpython-36.pyc,,
pymysql/_auth.py,sha256=l1VtBwDpCtTkalgYQFASO-rj-vEd3DGYR8g-eQjNF1U,7399
pymysql/charset.py,sha256=JCvshFnNf4vzkpXc6uPCyg07qGNfZaVZoxrFqzVlKFs,10293
pymysql/connections.py,sha256=EwKWqFIWlx6kbOeDFIhMFpjJ9-pyF140E5ouKgrrYfY,51251
pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
pymysql/constants/CR.py,sha256=oHyD9dnR1DUX7hd42rcamMnFrWhjUZz7E4S6qQWSQb4,1927
pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pymysql/constants/__pycache__/CLIENT.cpython-36.pyc,,
pymysql/constants/__pycache__/COMMAND.cpython-36.pyc,,
pymysql/constants/__pycache__/CR.cpython-36.pyc,,
pymysql/constants/__pycache__/ER.cpython-36.pyc,,
pymysql/constants/__pycache__/FIELD_TYPE.cpython-36.pyc,,
pymysql/constants/__pycache__/FLAG.cpython-36.pyc,,
pymysql/constants/__pycache__/SERVER_STATUS.cpython-36.pyc,,
pymysql/constants/__pycache__/__init__.cpython-36.pyc,,
pymysql/converters.py,sha256=MBXTOCXSyewMculaRliBEzPVkOKXLiRMqvIXih9Akrg,9430
pymysql/cursors.py,sha256=1E79f3vysxygyfZMhvR6-yFDfysRn3Go8xZTywteh4o,15366
pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773
pymysql/optionfile.py,sha256=ehPrZW4d7pcEvXGAEpsKgLdXpFnIQD93yF7T_jHjoRk,573
pymysql/protocol.py,sha256=Ur8xXkVvyFc6m5CA34QrHBasADvS_NPFsWU-Q3flRYA,11859
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1 @@
pymysql

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,19 @@
Copyright 2005-2021 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,234 @@
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 1.4.3
Summary: Database Abstraction Library
Home-page: http://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7
Description-Content-Type: text/x-rst
Requires-Dist: importlib-metadata ; python_version < "3.8"
Requires-Dist: greenlet (!=0.4.17) ; python_version >= "3"
Provides-Extra: aiomysql
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiomysql'
Requires-Dist: aiomysql ; (python_version >= "3") and extra == 'aiomysql'
Provides-Extra: aiosqlite
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiosqlite'
Requires-Dist: aiosqlite ; (python_version >= "3") and extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncio'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb (>=1.0.1) ; (python_version >= "3") and extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: sqlalchemy2-stubs ; extra == 'mypy'
Requires-Dist: mypy (>=0.800) ; (python_version >= "3") and extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient (<2,>=1.4.0) ; (python_version < "3") and extra == 'mysql'
Requires-Dist: mysqlclient (>=1.4.0) ; (python_version >= "3") and extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysqlconnector ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle (<8,>=7) ; (python_version < "3") and extra == 'oracle'
Requires-Dist: cx-oracle (>=7) ; (python_version >= "3") and extra == 'oracle'
Provides-Extra: postgresql
Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; (python_version >= "3") and extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 (>=1.16.6) ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: pymysql
Requires-Dist: pymysql (<1) ; (python_version < "3") and extra == 'pymysql'
Requires-Dist: pymysql ; (python_version >= "3") and extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; (python_version >= "3") and extra == 'sqlcipher'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Python Version
.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-
initiated decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
Documentation
-------------
Latest documentation is at:
http://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <http://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<http://www.opensource.org/licenses/mit-license.php>`_.
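A minimal sketch of the Core layer described above (an assumed in-memory SQLite engine and a hypothetical users table; note the bound parameters rather than literal values in the rendered SQL):

from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        create_engine, select)

# Sketch only: the Core expression language without the ORM.
engine = create_engine("sqlite://")            # in-memory database
metadata = MetaData()
users = Table("users", metadata,
              Column("id", Integer, primary_key=True),
              Column("name", String(50)))

with engine.begin() as conn:                   # transaction commits on exit
    metadata.create_all(conn)
    conn.execute(users.insert(), [{"name": "alice"}, {"name": "bob"}])
    rows = conn.execute(select(users).where(users.c.name == "alice")).all()
    print(rows)                                # [(1, 'alice')]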

View File

@@ -0,0 +1,472 @@
SQLAlchemy-1.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
SQLAlchemy-1.4.3.dist-info/LICENSE,sha256=YBpAuebmf1_VblyrYHwFdqeATqzBxJ-T6h8-e4s2zW4,1119
SQLAlchemy-1.4.3.dist-info/METADATA,sha256=aGZ4yMmb_jqSIznqHtvGILMtIfl5aZErIRjxTybO1o8,9385
SQLAlchemy-1.4.3.dist-info/RECORD,,
SQLAlchemy-1.4.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
SQLAlchemy-1.4.3.dist-info/WHEEL,sha256=epucrC2yyYTysDCMzXuz8eGMTMKryzRfNOvMGdslbjc,101
SQLAlchemy-1.4.3.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
sqlalchemy/__init__.py,sha256=W5VArm2RGv58cC1hc9zwPsG8svMpa4ZIS5hkg3j7ips,4240
sqlalchemy/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/__pycache__/events.cpython-36.pyc,,
sqlalchemy/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/__pycache__/inspection.cpython-36.pyc,,
sqlalchemy/__pycache__/log.cpython-36.pyc,,
sqlalchemy/__pycache__/processors.cpython-36.pyc,,
sqlalchemy/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/__pycache__/types.cpython-36.pyc,,
sqlalchemy/cimmutabledict.cp36-win_amd64.pyd,sha256=tEiUr5jVBUrdtKYoud6NWC8x6JCibiM0MMJ026Yae8c,14848
sqlalchemy/connectors/__init__.py,sha256=TBW6z0L_8lga2UXWZBC2kXFkmBj1tyyHGj2zPiwVTnA,288
sqlalchemy/connectors/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/connectors/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/connectors/mxodbc.py,sha256=YPVxoPpPZyCGgZuNG-PXh6vF_y4BT2IJTbhSoSUFgG0,5948
sqlalchemy/connectors/pyodbc.py,sha256=-9snrm-iRg68Zf-3HBGDvJqFQlK17lku5zL03KJbB0I,7005
sqlalchemy/cprocessors.cp36-win_amd64.pyd,sha256=cPRGY-Q4y8hVOsgr40u5UzGfBcEiS4NJ7P0ytMka3ms,17408
sqlalchemy/cresultproxy.cp36-win_amd64.pyd,sha256=GkgTCmTNx8xH4LJJD4z-P4gRjyqgsWNaWxMYqZZVkIw,21504
sqlalchemy/databases/__init__.py,sha256=Wr0SaNwZ_U8v65tQ9b0kL8hcpsCMCQ9f8dnfLjg9z1M,1047
sqlalchemy/databases/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/__init__.py,sha256=Q9SoXJ9mpShD_4lbIXI6wjJqJsmpmDgS-yndzS1aauU,2156
sqlalchemy/dialects/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__init__.py,sha256=99IsIXbvMTkG2KkeQDdsuxdM5eonmd8kN_7_Hwew1TA,1193
sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-36.pyc,,
sqlalchemy/dialects/firebird/base.py,sha256=lp3OUbW9ne4oItoKOCyX36Vpvf4IKpIELW9MrwEz-i0,32075
sqlalchemy/dialects/firebird/fdb.py,sha256=eD-ckC4i1x_AFrNaEIcibrYmfa6AJkZFOMKWVnIKd0k,4190
sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=MTIvingeuEakZi0aNjJskHyjYiwKXb0UbJryVKAmivg,6638
sqlalchemy/dialects/mssql/__init__.py,sha256=8fhQ2dxcuRwcdYrdbI9nzVAel5nNJotnV1btviNhIag,1872
sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-36.pyc,,
sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mssql/base.py,sha256=kIIaZduBB1TtzoJxZfmeIGG5BfwqIy7qCM9EzXYp8X4,109515
sqlalchemy/dialects/mssql/information_schema.py,sha256=OjFc7KP_xCIyIdtjEnh0rTk40O5MOiAJWQ-rd463WvQ,7773
sqlalchemy/dialects/mssql/json.py,sha256=bAi9z-htrC6adJUiBRetVj3RwtJ2GNN5I_KGeHFL8_s,4683
sqlalchemy/dialects/mssql/mxodbc.py,sha256=VCGmH5eUeKX6R1CGnk0XVyNue-ZhHipSVn28gADQAqM,4919
sqlalchemy/dialects/mssql/provision.py,sha256=oTGw0l5RNcXIWkA0s2erVgk2XKiNw26IRzZE84tN2uE,4371
sqlalchemy/dialects/mssql/pymssql.py,sha256=nWiPy6VORFGr0JLxCdkrcxtY543mIhgEmWPGT_2YjAc,4939
sqlalchemy/dialects/mssql/pyodbc.py,sha256=Lh2WtJ3Hn7CbachPNnhelescpRn1ope_2fJ2L-jzb98,20669
sqlalchemy/dialects/mysql/__init__.py,sha256=iVMCrbEXTYcQ_Cl6MpXNCk-W3U4vdEFjTz8DKP3xGoc,2216
sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/dialects/mysql/__pycache__/types.cpython-36.pyc,,
sqlalchemy/dialects/mysql/aiomysql.py,sha256=84s7iGF2tXRT3p2nde7cz9NczY7u_Kxe87pNYphuxP0,9421
sqlalchemy/dialects/mysql/base.py,sha256=UEf6ZEZ2jcnTLHxRWKS90QmQ8AjLDINeD__DAXVDrHw,118919
sqlalchemy/dialects/mysql/cymysql.py,sha256=Ki9X8FrrgdVGFurjXgAuDSmflPzTJp05CeYWNTl_6M8,2315
sqlalchemy/dialects/mysql/dml.py,sha256=eTV2FwYPMw3C7zC039pM7th68ASwf-A0Yco9a2VtQoc,5630
sqlalchemy/dialects/mysql/enumerated.py,sha256=rMAczGmN_H4DtbmhtdRQxSnPbJQEj4qZn9Kk2V2aZlI,9337
sqlalchemy/dialects/mysql/json.py,sha256=iEKILiRmIpakQcqJt2OzfmVGFsgSyi7gBg63khPggu4,2396
sqlalchemy/dialects/mysql/mariadb.py,sha256=8mUqaCtI2ZA4nAhkbV9b0ps4UosOMHSXZ22mCUldrVk,454
sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=WFcJM1UGbRWCWFIS2nL3BqR7iN_IiOLkT2CF4_ZxmfQ,7718
sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=NZLoanCTCZBLRvxhLBy6BftD0XzZXhPXI23KvdRcP6k,7892
sqlalchemy/dialects/mysql/mysqldb.py,sha256=TlXLC1ewlckI6xbVe7EhNpN7oflKZc2g_MMrTnU1Xps,8952
sqlalchemy/dialects/mysql/oursql.py,sha256=5P8Ap8n2odb4g4ac0z94ujxPxUWV-2yzxe6rleXntXQ,8757
sqlalchemy/dialects/mysql/provision.py,sha256=-gCG7MwEMtztSmNU107Q563t-nN-oqTVkCiay-HRRQI,2727
sqlalchemy/dialects/mysql/pymysql.py,sha256=xV-HhwaCllTO40Jre9Lt-5xN2-4EZqvW6_Ud5Hu8AzY,2630
sqlalchemy/dialects/mysql/pyodbc.py,sha256=apawQOc6Y7OwlMgBwDus01tar08JzBXP_7wXbm1LnzY,4600
sqlalchemy/dialects/mysql/reflection.py,sha256=0E17NOgV29EJks_-5Q0n5Hif9Izmx71mb3UKkVYKbQo,19110
sqlalchemy/dialects/mysql/types.py,sha256=hpLXvadrRtxY3MxunTFKZRRFoGl4CMj0PBtMMTIJ8F4,25362
sqlalchemy/dialects/oracle/__init__.py,sha256=z_sm511AIB30Vuv1Es-hfkRMHe5-yeqNGpnzE_XkPPo,1286
sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-36.pyc,,
sqlalchemy/dialects/oracle/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/oracle/base.py,sha256=IhTLBydyx8e5S6SGMz2YLvALiDizWOejHv8Es8MpscQ,89290
sqlalchemy/dialects/oracle/cx_oracle.py,sha256=DjMCTjX_z26o5CYA7UY0hKUjaa0yS34hveSfu0wKtt4,50417
sqlalchemy/dialects/oracle/provision.py,sha256=HCi5hs5fv6rXv-lVQir6gsKrhes9NoAWlChzR3ueq74,5965
sqlalchemy/dialects/postgresql/__init__.py,sha256=WraAOkI7m26v85Pf28M2ONh-3j4SduvqFdefDPpc3EM,2625
sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/array.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-36.pyc,,
sqlalchemy/dialects/postgresql/array.py,sha256=3WQ2yBDAhms40MhJ683FELKmN9TL8XLpXDj3-TIaASo,12615
sqlalchemy/dialects/postgresql/asyncpg.py,sha256=qAh38Xxw66N9A3y9R1d7xiWRFdUKKnfpBKRNO6O5y4Y,34279
sqlalchemy/dialects/postgresql/base.py,sha256=PvqMJNitHLcn4ytuvGOX635QuzromQ0GDWcp054UymA,155134
sqlalchemy/dialects/postgresql/dml.py,sha256=jBXY4DPq1tWG7gA2zfqZ3bfAuOZqaJH2PWQdVXJKnLs,9097
sqlalchemy/dialects/postgresql/ext.py,sha256=K5huWreKbPLRVuwF7gFRhSPG0CY5sdXF0LiFFmDpMbY,8656
sqlalchemy/dialects/postgresql/hstore.py,sha256=obkZOC84OYVWUC8O4PyMlEGifJAA-AjsgINtrkOj0Ac,12837
sqlalchemy/dialects/postgresql/json.py,sha256=szREjVPlVfpwAc5D6tHqNCBL2UHIfr6yk_CgZqZQ10A,10777
sqlalchemy/dialects/postgresql/pg8000.py,sha256=_dWwkOG4SNk4CL1wIGIXzwjrRVS0iUQbFBLG-5B0A08,14384
sqlalchemy/dialects/postgresql/provision.py,sha256=egg9w4wp3N3hGUAxhS8ZFeZgFQUIOOCxbJA0CidyflQ,4471
sqlalchemy/dialects/postgresql/psycopg2.py,sha256=DJ_O3FOHkEgeWkiZfxulFdBdZtHPWZlxz-lxguxZi6I,37298
sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=bVmuI7ZqVSp5sJa4Ar5KA9hYQsyD5FURKw_OF2LCylw,1716
sqlalchemy/dialects/postgresql/pygresql.py,sha256=1GuxYJvPLufE3RmghAjuVfGEeEILDk9g7YIGKT3gAEI,8824
sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=nAg2-bjBkH_Vl8weQjnnIWXVLqs19kulsrbByT1aj90,3780
sqlalchemy/dialects/postgresql/ranges.py,sha256=b04r-iDteuTMhmVAc5Yf6cGraGTFggZTi0hx61_P3ng,4762
sqlalchemy/dialects/sqlite/__init__.py,sha256=rPgXiMXoA6X1_aeqq4ABU6QDqhGfIHcBaUY89utTQQw,1255
sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/json.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-36.pyc,,
sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=zvuu2Dpu9xVB8bCyp2cAfLOuE97vZbmvUPTo27g6hpA,10142
sqlalchemy/dialects/sqlite/base.py,sha256=YiIly9RzKi2gWe4VySBoUJjcAWCW5_jLObKUKcbfWaY,90211
sqlalchemy/dialects/sqlite/dml.py,sha256=1hVqN_Eel90X3M3Sp628q8Eqj-cD9dT1lcRMED60_cg,6311
sqlalchemy/dialects/sqlite/json.py,sha256=bz_1axFG5YI9kLszE-oiCN3-z95zIPPcLgVwug_-AT4,2602
sqlalchemy/dialects/sqlite/provision.py,sha256=3F5ZX2dYGMFAGra99UVavCihth1_XazJXX9XAet8gbw,4818
sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=wkzVT4Jh6jP4j3LhzUmOhFUedBJkcSLx1898zyoakxI,5433
sqlalchemy/dialects/sqlite/pysqlite.py,sha256=P9J8b5XfAqe_H-JiopCFNV5cysrDnIRLVHeg064cKNc,23762
sqlalchemy/dialects/sybase/__init__.py,sha256=L1T7DkOKeeT4rs_-HMQtT8a6L_yeHNJqtTi8MGd_pbw,1430
sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/base.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-36.pyc,,
sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-36.pyc,,
sqlalchemy/dialects/sybase/base.py,sha256=iCUMe4RRXXdSUAT7h3-aW5GIk3uJGlgHF4ClTKva7CU,33409
sqlalchemy/dialects/sybase/mxodbc.py,sha256=8ovN_-cbO84AfkVv23MYWTkGnS1yASywAvRBJqzy-c0,934
sqlalchemy/dialects/sybase/pyodbc.py,sha256=9KCscDljWVslHRtO2BGYO4ymef_SNP3pKvumeJxIvCU,2284
sqlalchemy/dialects/sybase/pysybase.py,sha256=nHDwW8fgObFN3FpiOmS5ZNZAgg9IeXgFxPCGvH7jRNU,3435
sqlalchemy/engine/__init__.py,sha256=x8TozgrDsPtS2j-9ByAv0ACrcpUMiFJMpxc0lsGQmo8,2092
sqlalchemy/engine/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/base.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/characteristics.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/create.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/cursor.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/default.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/events.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/mock.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/reflection.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/result.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/row.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/url.cpython-36.pyc,,
sqlalchemy/engine/__pycache__/util.cpython-36.pyc,,
sqlalchemy/engine/base.py,sha256=kAdtEmeeHwSPKY_5spjXEU8CoeQAcIo46fFevspAsoA,116490
sqlalchemy/engine/characteristics.py,sha256=DrhLcmpnkMpmgo_kgQpcujoD4nW81vTt-GkuWqW9QaY,1873
sqlalchemy/engine/create.py,sha256=CNlAgKCKeGS92uevCf_t3KqgcsRRV3jut1eImoZg2wo,30443
sqlalchemy/engine/cursor.py,sha256=pupdrRq0rjmvSaDc5eqGFH5H2g7GZjyRz1qHvFZYX3k,66728
sqlalchemy/engine/default.py,sha256=TAyEnSHupRkVT79MNcQg76aF8e4dJHBkFsVSJbQTqKs,64416
sqlalchemy/engine/events.py,sha256=Hv0GAy-w8m9xdq8E6vZz0fSuLgbZ_9uXMVO3N-HavN4,33483
sqlalchemy/engine/interfaces.py,sha256=yhueJLG1fTCYRD5mF3bRNRyxP_GFg7sRCoVx1haJJAE,59563
sqlalchemy/engine/mock.py,sha256=eXuMRQawEZFJWpdTELQcljPq5guokWSW9koqySGL-90,3743
sqlalchemy/engine/reflection.py,sha256=C8KNhqjHMV_Nq0poMeTC7zMe6x0IDP0_aLVv1FL2vRU,39647
sqlalchemy/engine/result.py,sha256=887QOHR5mI1kVJkAHpPgHhU_a53xlxR6rCE_lBHUzkg,55170
sqlalchemy/engine/row.py,sha256=8wFKjU5NCYIWYG5lEfwIp7Zdc0XsYr6jJMm4Ua92xMk,17810
sqlalchemy/engine/strategies.py,sha256=Qqx7Nw6TPJe6KzM58oPHavzbSqaVST4DQGrhTO42R14,430
sqlalchemy/engine/url.py,sha256=6aCchFoBvytcEAinnrC3FMOenH5ByDeBjYMqWC574AI,27299
sqlalchemy/engine/util.py,sha256=YQ08ar9GluoSh_a72wMFlobu6j7e_Joz8k7tPv5gnG0,5007
sqlalchemy/event/__init__.py,sha256=UEArBlnnF0If5n_aohWAmYx3Jjoym-8bBgyZ0Fp7hMI,533
sqlalchemy/event/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/event/__pycache__/api.cpython-36.pyc,,
sqlalchemy/event/__pycache__/attr.cpython-36.pyc,,
sqlalchemy/event/__pycache__/base.cpython-36.pyc,,
sqlalchemy/event/__pycache__/legacy.cpython-36.pyc,,
sqlalchemy/event/__pycache__/registry.cpython-36.pyc,,
sqlalchemy/event/api.py,sha256=zmVEc1OZ2IHUHxRk7QBIQj12NKkXA1_tedtPq1xWpu4,6994
sqlalchemy/event/attr.py,sha256=LJp2lIAWoyPwk99AGy5vYpxd5NClwoXSckiORCicY7w,14268
sqlalchemy/event/base.py,sha256=uL2Vvau-A6b5W9IREO2_jfSz78aP_QH8cjglIbaTBNY,11280
sqlalchemy/event/legacy.py,sha256=spFIf45jAL4hi4ah9_ttIbj0hzpgq5bJlv4lpoA7eRs,6454
sqlalchemy/event/registry.py,sha256=p1ixAawLN7Lssif73SjTsPKHNU6fhTJOwrVtCrSfifE,8567
sqlalchemy/events.py,sha256=08BdPP_ASWqos10aJ_grM8qKKsjKFIupUIwqt3kaH-c,480
sqlalchemy/exc.py,sha256=bjWfDR8PYR_s-q8bI8R19utY1i8tDhOBg17MGYe7pIA,20391
sqlalchemy/ext/__init__.py,sha256=FFnko4I2iMY2OaeAIJdsu200D68cAfubuqC_2Eeg8BM,332
sqlalchemy/ext/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/associationproxy.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/automap.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/baked.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/horizontal_shard.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/hybrid.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/indexable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/mutable.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/orderinglist.cpython-36.pyc,,
sqlalchemy/ext/__pycache__/serializer.cpython-36.pyc,,
sqlalchemy/ext/associationproxy.py,sha256=QoEz-UR5j43ZnqNl13gZp-cMpdBS7ldv1Wtay-EkTNo,51568
sqlalchemy/ext/asyncio/__init__.py,sha256=ZVLUj6bftLh9qlYSbd4JyAqZWcB2uEAaj01pBUICKBA,676
sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/base.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/engine.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/events.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/result.cpython-36.pyc,,
sqlalchemy/ext/asyncio/__pycache__/session.cpython-36.pyc,,
sqlalchemy/ext/asyncio/base.py,sha256=hiXpDgIjnXnzIleYZrb9GhLcYc_b3ARiPuyBsirtWkk,1027
sqlalchemy/ext/asyncio/engine.py,sha256=5VLSudvun_PMd-eZlIgRYoYTK2hfSqFjoY3ecpROjQA,23820
sqlalchemy/ext/asyncio/events.py,sha256=mXQgkDLsnS6qkQeB_GV-i6VhtmNAve85B-jIGs7cRyg,1270
sqlalchemy/ext/asyncio/exc.py,sha256=0gqT7n-U-p7rLbkzAQKfzQ6_jBZxHweK-2mXFWVYvec,659
sqlalchemy/ext/asyncio/result.py,sha256=tm1yk0kqZN5UDh0KCOMh2_DJThaKHoewXj8ClIw3tYw,22009
sqlalchemy/ext/asyncio/session.py,sha256=pifHoNNYrzNcrlFKYTxpwQ8fFaUTcTCMJH60MFroe0Y,12645
sqlalchemy/ext/automap.py,sha256=566kwV-DciCyj3i9kls3lFpkzY4GVWvMcuOutfsawz4,46225
sqlalchemy/ext/baked.py,sha256=Yc9t2UY5c8Fu4v_yDWInPFZBeLi657uwXVpGr8g8dwo,20616
sqlalchemy/ext/compiler.py,sha256=Xfq_ONWDQ_mhHNSHH_AY5eIhDF4OEQwqc5018l9fQxU,18496
sqlalchemy/ext/declarative/__init__.py,sha256=4lGjX9yq22BJ3K7ZGvFF_46oWoYStN9pkIKypXNavNQ,1905
sqlalchemy/ext/declarative/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/declarative/__pycache__/extensions.cpython-36.pyc,,
sqlalchemy/ext/declarative/extensions.py,sha256=iMCJy8NAXptisM7dCz_Vqp4Afid-WpzVwpJvfFmEQHo,16878
sqlalchemy/ext/horizontal_shard.py,sha256=qREdIj09TeY5axbuWsFfrfCZuk9zgjDzcqREd-DxoAo,8990
sqlalchemy/ext/hybrid.py,sha256=WUFUUILbj6dj-7doZIyrKxKLEMGUGFytr-ZlrYxTGos,41675
sqlalchemy/ext/indexable.py,sha256=7GhAsHMFk_q4YiCAbMU0J9_jbQeJa73E-iVzDNB9DNU,11606
sqlalchemy/ext/instrumentation.py,sha256=rDk7cBPATJLzVTXgk4eiMMZOfhNXqKN0ej5uGB3L8J0,14787
sqlalchemy/ext/mutable.py,sha256=cuiyl1bgu6DSCMMCZcQIHObul1BLUh-jSTXGtZUgwm4,32947
sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/ext/mypy/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-36.pyc,,
sqlalchemy/ext/mypy/__pycache__/names.cpython-36.pyc,,
sqlalchemy/ext/mypy/__pycache__/plugin.cpython-36.pyc,,
sqlalchemy/ext/mypy/__pycache__/util.cpython-36.pyc,,
sqlalchemy/ext/mypy/decl_class.py,sha256=lsnUul0htQNEVnCSZbcIytshf5Cd7zPH2viqhExMib0,33438
sqlalchemy/ext/mypy/names.py,sha256=FH6E3eA6VdNg5xa1yO6MEapn8rwR_5yt-sutUQstOcA,6121
sqlalchemy/ext/mypy/plugin.py,sha256=3vS3m2HF5Dl7qizenQ-fQGEVCg58ljixl_jmNUyL_wU,8600
sqlalchemy/ext/mypy/util.py,sha256=8seB2s3EBE9AFLznT7oxHRPerv_kWCyuP6h2nFK09uM,2434
sqlalchemy/ext/orderinglist.py,sha256=sa2PZ06PH8Cs6Zwcgzog3vUg6WjUbGxdGkGkoMCpJxc,14262
sqlalchemy/ext/serializer.py,sha256=rae10NTl3_fm9I_SrXflEioL8GNQnShlH7cUxfSUnds,6132
sqlalchemy/future/__init__.py,sha256=uXl3ElTaXdjRoOBshDEitc07u6IGLfCzLONWerWRF88,542
sqlalchemy/future/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/future/__pycache__/engine.cpython-36.pyc,,
sqlalchemy/future/engine.py,sha256=re7gxY41VQAVmXGt-BjHv96dCxmnKSEwNA4TphSBy8U,18074
sqlalchemy/future/orm/__init__.py,sha256=_g7j2aIWEydf4JHJuJDetdAVACyheROjqXmBkKb2OaQ,298
sqlalchemy/future/orm/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/inspection.py,sha256=vfF9qC0gJSRzn4JLu9CjNENzqCtDdzAIthoLxl8rW50,3143
sqlalchemy/log.py,sha256=MWkG0aStnbomwsE9wqkuQqwQiRyTykirq9_oY2032tg,6994
sqlalchemy/orm/__init__.py,sha256=QXsaDq5-2MuFHPfC9eMuT1uaSGBkZ303s8gCc-6BtTE,10728
sqlalchemy/orm/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/attributes.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/base.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/clsregistry.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/collections.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/context.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/decl_api.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/decl_base.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dependency.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/descriptor_props.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/dynamic.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/evaluator.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/events.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/exc.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/identity.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/instrumentation.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/interfaces.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/loading.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/mapper.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/path_registry.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/persistence.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/properties.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/query.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/relationships.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/scoping.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/session.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/state.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategies.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/strategy_options.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/sync.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/unitofwork.cpython-36.pyc,,
sqlalchemy/orm/__pycache__/util.cpython-36.pyc,,
sqlalchemy/orm/attributes.py,sha256=YTnvvuY4M5ps_VlRlDUzOgzZvBash-0vEePrAN3iZ58,76652
sqlalchemy/orm/base.py,sha256=JR1bHf9tmjd5Ucb_b4-Yx20p210hkeklUjYeA0ea_XY,15473
sqlalchemy/orm/clsregistry.py,sha256=T_cR_8J6_k8zBAtISYPnjWpKbMKQAbzREdPhBDzG6Z8,13731
sqlalchemy/orm/collections.py,sha256=zlcMywc13vKE5W14LBx3sN2RC9NjIltZy7-qEmvmySA,54774
sqlalchemy/orm/context.py,sha256=KQopouTWI9HE0_XHOJdWY7qKIVHw34O-big-3QpFh10,101395
sqlalchemy/orm/decl_api.py,sha256=qXYtB2tTJB189Kd1GcnY1dv1if6QyWLo_WjXwYDblME,34925
sqlalchemy/orm/decl_base.py,sha256=i5w2pXJKpmPL_Y6bQyvkZL3FTNuAt8SDl6th_dtDQWo,42924
sqlalchemy/orm/dependency.py,sha256=HPQE5POiLq8yUPuFtQ-IrMS9jPw67Z1-05c7xuZbm4Y,48268
sqlalchemy/orm/descriptor_props.py,sha256=e1QhefbkaR1ogmfxf4OXg1jtLY9ouUUMbkhNfAYrqxw,26266
sqlalchemy/orm/dynamic.py,sha256=POMl2pG9KpApClnjUEVNpvQ1qdIosZV8abe3jSdvIgg,15201
sqlalchemy/orm/evaluator.py,sha256=ymKsLcqOwf7OZ98PZINfgp2WUlB-Hv1pAXuoAbWITHU,7092
sqlalchemy/orm/events.py,sha256=XkM-zppdfKJeHGoZVFN0by9xg8kaWx7fWJY7YBb-Jwc,111676
sqlalchemy/orm/exc.py,sha256=XQNh69fkGLyeZ3apPiTTRwSO6y_ULbVOBLUACOfWNFc,6735
sqlalchemy/orm/identity.py,sha256=9D0FTwPOsUQ21mTlOkzdMSoIKYPcyn1gvbxee6MqHyI,7040
sqlalchemy/orm/instrumentation.py,sha256=-B_1XmWHK_P-N2A8m7CW5MqyBPzXGtH0r4-RBdQiGcs,20966
sqlalchemy/orm/interfaces.py,sha256=jJXyh_QIOn0v762C5hK3l1hSW4XS2eMb-jAR1roSGl4,28950
sqlalchemy/orm/loading.py,sha256=W5L9bL0qGf_wkEMXU3ly8bIF9DsTGRWCFt3ZrqRk0z0,48903
sqlalchemy/orm/mapper.py,sha256=0UoSeES6ANuw_3u_gqk9jGSYhUhhwyxpXIvHFOnE8Zg,139785
sqlalchemy/orm/path_registry.py,sha256=Qe86D6vzluH4rkfnmb3ZpHnTCt7eBri5PtyuhlcCbUQ,15588
sqlalchemy/orm/persistence.py,sha256=rhqAtyFKgJMin6hGFgTAdLgK2kpMowAfBiytkERurkg,81401
sqlalchemy/orm/properties.py,sha256=A2u2eDZ-2vI0D_lIBHVUfdqNDNhYF6gAF4sXiLN1BDQ,15099
sqlalchemy/orm/query.py,sha256=yOG5kp1Y2gmkJcuTxzQOMhBxV-aRWFYIIIDDiCXks2o,123906
sqlalchemy/orm/relationships.py,sha256=02rbGpsn20mzEKSA0mqfYzUBgMu2etNHqIp4vWkKbIY,144868
sqlalchemy/orm/scoping.py,sha256=7AYdcX5pC8djVbENScZIdu0_iti7LpmyhUWY9WriQGI,6793
sqlalchemy/orm/session.py,sha256=CxTiUFRAkP0FLOh182YzbX3oMPinu3xL1uJUxacc4To,156618
sqlalchemy/orm/state.py,sha256=nEUWZAFYzogMWkds8IehYV79isyIU8k75yAt_6B22rA,32553
sqlalchemy/orm/strategies.py,sha256=BoVWRA_oN7v04amgrCp_vkbw_wYJoy0AcvuDpJsiBBg,106174
sqlalchemy/orm/strategy_options.py,sha256=AAE0vHpIhw5hslirWgZhwGJiHsC-SzKS_u_0Pkv1nZg,60464
sqlalchemy/orm/sync.py,sha256=3WNF_rv2bvq-8w-Umo6WPPMpZR81CCC6lR0LVsdUyyQ,5990
sqlalchemy/orm/unitofwork.py,sha256=1tNseszFTxL6ILhekFC0PbmWR-JuKgGI16My_9mwQrU,27717
sqlalchemy/orm/util.py,sha256=I52Yult_K2ocyfuFXkJrB7y8s36h8QODNKBq3ggsraA,69382
sqlalchemy/pool/__init__.py,sha256=vPypHYNtKMNIc2Yrgw6qyVWHvvisQfIC0Gn3AhKi654,1658
sqlalchemy/pool/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/base.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/events.cpython-36.pyc,,
sqlalchemy/pool/__pycache__/impl.cpython-36.pyc,,
sqlalchemy/pool/base.py,sha256=Mr_Jcq9-Jbf7q7Kmb_TZjM4GPebCZ219gbyNCUNLDfg,36258
sqlalchemy/pool/dbapi_proxy.py,sha256=EECXJ-pyG1436lAs5H02gN3ByZIRd54yeVG6EInHmQU,4375
sqlalchemy/pool/events.py,sha256=e9pXO_HEZzsg-8V6jdtV-u6BjW4GCvdvSIJYeNYh8GU,8942
sqlalchemy/pool/impl.py,sha256=4hnbCYPqVDGj6pQIviWYs2Tw-nntXKlo7KHr6W30wUM,16292
sqlalchemy/processors.py,sha256=S-h949YV8k_f3HMoV2ChfwZE5L-e1sDPYTko5M1Gj_o,5920
sqlalchemy/schema.py,sha256=ntWzaOocNzjjFTTGcH2W-OHElZYiAxLFcWUhxVQJ-oE,2471
sqlalchemy/sql/__init__.py,sha256=Ie0LsqfpzMNZN621dtp3LFeF0ySw-FJmMoLrqrL81Z8,4810
sqlalchemy/sql/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/annotation.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/base.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/coercions.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/compiler.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/crud.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/ddl.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/default_comparator.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/dml.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/elements.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/events.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/expression.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/functions.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/lambdas.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/naming.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/operators.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/roles.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/selectable.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/sqltypes.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/traversals.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/type_api.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/util.cpython-36.pyc,,
sqlalchemy/sql/__pycache__/visitors.cpython-36.pyc,,
sqlalchemy/sql/annotation.py,sha256=DEkS5qPYTCEPMPFnWtf15NjDwJh32uh-KhPtInHoVqM,11588
sqlalchemy/sql/base.py,sha256=HNPpfrmc-yx6av6iZEtIQeJD9s4k18MN6aBCSVud9VI,52850
sqlalchemy/sql/coercions.py,sha256=xwaykN7fYLMmIrBifEEvcxr_pnEU1-XS65xYVjXZgNM,33173
sqlalchemy/sql/compiler.py,sha256=04o-fxN4oRfSeR8HCWYyEPNQZMqgF2bU-T082_uklYY,173483
sqlalchemy/sql/crud.py,sha256=4cwaeDAMZbg8ta-dvtIo8CStvqiDRDMnaptrVkghgF4,35283
sqlalchemy/sql/ddl.py,sha256=8mPsx-7Clur3dW3in_K9rGVcGBQKYML78ocg4d0sTFc,45348
sqlalchemy/sql/default_comparator.py,sha256=7iC7YGjoXyICcnxjovGA37T_smTSSnMlsf4RJGprF2U,11233
sqlalchemy/sql/dml.py,sha256=U4FUBR63s_Tjn2pZRjpWOsGlk0lAG6pK41QADNrDHxk,52762
sqlalchemy/sql/elements.py,sha256=vH34kcbnioeAOu80PN7ggiGMROY55qiCzb-iTWOe-h0,173376
sqlalchemy/sql/events.py,sha256=pQJPAeybf1jel2LMq59CU5cYe-_duV9pkUGbDxkSelc,13412
sqlalchemy/sql/expression.py,sha256=xA7Kg_G_oobzVANzXaXRC9Ms96U0g80UxgxH2GrPdmo,9105
sqlalchemy/sql/functions.py,sha256=lfy_TpbXay7hRiMhIXpF6sjLf6qFkxGrAzOPs7Qih0M,48000
sqlalchemy/sql/lambdas.py,sha256=Op11pCUChXpSRL1s3GL17FcII2y0zyMfwcZPa7PCXyo,43940
sqlalchemy/sql/naming.py,sha256=frPyvaq5BALvRCUcaH7s30e8lTZMoWEBHLkZILUIdi4,6995
sqlalchemy/sql/operators.py,sha256=lGH-1wxVybJ8t-GIbocnXxX8xU1jrY1FTVgPzH8n8jU,49307
sqlalchemy/sql/roles.py,sha256=IxcdKrnuHxLUAgsQ2OQVE6hXAFPUJOJ2BAh0hdbZmT8,5759
sqlalchemy/sql/schema.py,sha256=Ru-5XScUFLSJSuovyAPAKMpaCeeeHq7qfxXBgNq0jLo,191720
sqlalchemy/sql/selectable.py,sha256=rGj0V8VXmtYtkgZHYOWV9VZK5wJW4Yj85FWB6Epvm7Q,219087
sqlalchemy/sql/sqltypes.py,sha256=-gooI6xs8bcLVLGEMlcEm_kf377VfzmIsIIZy7pH9WA,109797
sqlalchemy/sql/traversals.py,sha256=kNA_J3CFrpumIQQhmCgGy1Z-9JJCUIw1sr5AP2zqWxQ,49060
sqlalchemy/sql/type_api.py,sha256=KDP2EkJC8xin-3slh1p9JBKXOjEoLmqLoGDuEs78by8,56264
sqlalchemy/sql/util.py,sha256=xoWI-NGQjmVWgTnODk2BVXvzNAalJtReSL-IrdzbQ2w,35518
sqlalchemy/sql/visitors.py,sha256=aMfJQOl-mBFLc5k5_YZVR3ylwknWiFyXX_D9gYDyiDA,26682
sqlalchemy/testing/__init__.py,sha256=d-8eOqCyQ8mzNhUwEgKk4fYLN41av2RPtPm_wN4XxPY,2773
sqlalchemy/testing/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertions.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/assertsql.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/asyncio.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/config.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/engines.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/entities.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/exclusions.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/fixtures.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/mock.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/pickleable.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/profiling.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/provision.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/requirements.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/schema.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/util.cpython-36.pyc,,
sqlalchemy/testing/__pycache__/warnings.cpython-36.pyc,,
sqlalchemy/testing/assertions.py,sha256=RrB0Az98U0AB4w0xCosIs7Ig_HcPdedyur8vwk-JsAo,23968
sqlalchemy/testing/assertsql.py,sha256=c0847vFe3fPneaPDkHQc_P3f06S3NwPbY0n61zIjypI,14491
sqlalchemy/testing/asyncio.py,sha256=4qUnZPDR7omIo3FLoRWy6hBJbne1KbyMELbdIWEZddk,3800
sqlalchemy/testing/config.py,sha256=IxwIEAW7_1b88yUOX_wjivjuS9PvlCJVEpIScmtQh7Y,6751
sqlalchemy/testing/engines.py,sha256=vVlEGSVwHk8U_k1bz_IyS2kt0kOfGOBPMbkcz6_Tx14,12872
sqlalchemy/testing/entities.py,sha256=BzhnwvfZ_NkNn4xtvFbLzPvQw2T9gEBNc7hMay2cG5s,3363
sqlalchemy/testing/exclusions.py,sha256=2X5m5QCLO0W0D6GmI7cw6BuMIclsBoMnnjaqAY2Hclc,13793
sqlalchemy/testing/fixtures.py,sha256=3CVr61NoJ9VbhjIBafnYHtvhDYJH44cta39z1xMyZZg,19452
sqlalchemy/testing/mock.py,sha256=krEfc7Vz1EEjkPb1D07yRZXX1ehaCCT2uFoQiXz9EOM,925
sqlalchemy/testing/pickleable.py,sha256=5Dlu3OP7s5DKAI1c4XP6UCz_17MOJsM3E0zqQWesZFA,2847
sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
sqlalchemy/testing/plugin/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-36.pyc,,
sqlalchemy/testing/plugin/__pycache__/reinvent_fixtures_py2k.cpython-36.pyc,,
sqlalchemy/testing/plugin/bootstrap.py,sha256=0104b_dYFUT5WsZU-NL44wlMo1LrKByBtPQYaFt6CTQ,1738
sqlalchemy/testing/plugin/plugin_base.py,sha256=teDNljoiqay0PjFBiTLxvszinCGMuomXtKxSxQ2tu1k,22010
sqlalchemy/testing/plugin/pytestplugin.py,sha256=IKRXIdB_DFAZ9Bdm3iTtS2vPZI0YBzD-d12nMWZ13A4,25201
sqlalchemy/testing/plugin/reinvent_fixtures_py2k.py,sha256=b9fWp5RXdePykrNviZPXaGDIjOEOfovchez2Ovr4IRQ,3400
sqlalchemy/testing/profiling.py,sha256=K4Kn3o4GMbdb-1YMu1E4tPkpuHxbnJIWEqypsv_nY2s,10900
sqlalchemy/testing/provision.py,sha256=xUFZc42DaS2B72bmfZsES5RdtBYbi2N7J1_NLMVeebE,12555
sqlalchemy/testing/requirements.py,sha256=9VYhn71D8LupidnMMy1jHX7DvebjugOdtnq2boExexk,41301
sqlalchemy/testing/schema.py,sha256=D3Kh8anrII4PK3yTebCe6dyTyLxCc2kZqK5zVtSnkfc,6441
sqlalchemy/testing/suite/__init__.py,sha256=u3lEc0j47s7Dad_2SVWOZ6EU2aOMRWqE_WrQ17HmBsA,489
sqlalchemy/testing/suite/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_cte.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_insert.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_results.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_select.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_types.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-36.pyc,,
sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-36.pyc,,
sqlalchemy/testing/suite/test_cte.py,sha256=shi2WJZpzAzDCdkmzx0IDEu-URsLLBsdyFdzDqsfpyw,6387
sqlalchemy/testing/suite/test_ddl.py,sha256=BZuYkKaG_tNUkZcFLNd6o1qnfbzU0IYiedCw_NhOJBg,12143
sqlalchemy/testing/suite/test_deprecations.py,sha256=0LUmXIiK8hHUr6tY8cJJC7VErOj9YNGNN-c324k08Dw,5204
sqlalchemy/testing/suite/test_dialect.py,sha256=XpwZxIXlMVsZlP74khuPW2wZskinT0IOT9S6PM5PyE8,11256
sqlalchemy/testing/suite/test_insert.py,sha256=Akt1LtKajEDpSHBpvi3JikPw5xbN7BkpqQouWEo7l3w,11028
sqlalchemy/testing/suite/test_reflection.py,sha256=kZI4xcVR7_Ft_8kYYF58FjM7IIeuFzS0yXDj7lVfwbA,56815
sqlalchemy/testing/suite/test_results.py,sha256=9g7xHkd97fpYBo1ITcQXmk9sV4YXMr0jXN2bi4Y0vaY,14187
sqlalchemy/testing/suite/test_rowcount.py,sha256=ExxMW8o8PyRSg8H73Fv_oy9Z5-hL8JcolY9FOMi2tJU,4917
sqlalchemy/testing/suite/test_select.py,sha256=OAXCbV4TPHtIn8oPI9w2LH3M6yw_RJBOZgh184armIY,44660
sqlalchemy/testing/suite/test_sequence.py,sha256=2KBcs0FtoL3gk37IN2PnRZSnQwt7RKkShAbYQHFTBcw,8713
sqlalchemy/testing/suite/test_types.py,sha256=MwidlAJ0UihzDRiU2XSLjxigTOTBMamSg05bLozAt9k,45092
sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=T9Bkkj7Cm2mIQW5BQFU2NnJVnvBhZyEQXcuHRO3AhKc,6937
sqlalchemy/testing/suite/test_update_delete.py,sha256=v7_DDYS2RuhJgYvapgQlE8LZa4PiQklans8F2w27Xwg,1582
sqlalchemy/testing/util.py,sha256=pF1e_qH8eH1LVN4xB0hgL3XNfsbb5A2iRvNG8c_IgVc,12960
sqlalchemy/testing/warnings.py,sha256=rddnpL1RgoRcUO9JWB_heOo_oRYk6Dq19f-gd1iyt8A,5779
sqlalchemy/types.py,sha256=xd4i1Yi0zY9lb6jZuy70TOkdKwP_deLpB20IQ3nx_zw,2997
sqlalchemy/util/__init__.py,sha256=e1kiQqCKVBA3qLujsVNosM1T2Y9crSZsddYoeAAtsYU,6390
sqlalchemy/util/__pycache__/__init__.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_collections.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-36.pyc,,
sqlalchemy/util/__pycache__/_preloaded.cpython-36.pyc,,
sqlalchemy/util/__pycache__/compat.cpython-36.pyc,,
sqlalchemy/util/__pycache__/concurrency.cpython-36.pyc,,
sqlalchemy/util/__pycache__/deprecations.cpython-36.pyc,,
sqlalchemy/util/__pycache__/langhelpers.cpython-36.pyc,,
sqlalchemy/util/__pycache__/queue.cpython-36.pyc,,
sqlalchemy/util/__pycache__/topological.cpython-36.pyc,,
sqlalchemy/util/_collections.py,sha256=vQ5XrglAq0tVxIQtay9WVuQSgnKJ-L_bF4bRflNgEew,30227
sqlalchemy/util/_concurrency_py3k.py,sha256=rczBsNqMdZXVO8X8tWbH7R9353vJQpjicCUSvbzj5WE,5663
sqlalchemy/util/_preloaded.py,sha256=B7sQrjHOamdgOWZ1EAFWPOvlXH59ose5KoOwFMkhFe0,2463
sqlalchemy/util/compat.py,sha256=e_xujH7Oh5FR1sULMO6bsPV_10jUd2GVLFm3_raFjvg,18170
sqlalchemy/util/concurrency.py,sha256=V-ksGDclRRMI28JW-PoVhMY4dcNjH0spkTGydzKX50U,1479
sqlalchemy/util/deprecations.py,sha256=3USGCFIbkb8ez-DuaCqgIbTrZS7hXVKan1rHJOWYzLU,11858
sqlalchemy/util/langhelpers.py,sha256=UP0PHoAbl6mOM25qT64ZTEON8ZRbVc2unbJGPEoEIuw,56532
sqlalchemy/util/queue.py,sha256=SoUrtd6cw5ncTUk2FN1-YEvetBHcZzo5Zbr2l-9VJGw,9583
sqlalchemy/util/topological.py,sha256=w9_UvLP0Gf8wsCjpBxNwspXdoll_7kqO-rYVb-I5AU8,2957

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp36-cp36m-win_amd64

View File

@ -0,0 +1 @@
sqlalchemy

View File

@ -0,0 +1 @@
pip

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2013-2020 aiohttp maintainers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,978 @@
Metadata-Version: 2.1
Name: aiohttp
Version: 3.7.4.post0
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Author: Nikolay Kim
Author-email: fafhrd91@gmail.com
Maintainer: Nikolay Kim <fafhrd91@gmail.com>, Andrew Svetlov <andrew.svetlov@gmail.com>
Maintainer-email: aio-libs@googlegroups.com
License: Apache 2
Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
Project-URL: CI: Azure Pipelines, https://dev.azure.com/aio-libs/aiohttp/_build
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Framework :: AsyncIO
Requires-Python: >=3.6
Requires-Dist: attrs (>=17.3.0)
Requires-Dist: chardet (<5.0,>=2.0)
Requires-Dist: multidict (<7.0,>=4.5)
Requires-Dist: async-timeout (<4.0,>=3.0)
Requires-Dist: yarl (<2.0,>=1.0)
Requires-Dist: typing-extensions (>=3.6.5)
Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
Provides-Extra: speedups
Requires-Dist: aiodns ; extra == 'speedups'
Requires-Dist: brotlipy ; extra == 'speedups'
Requires-Dist: cchardet ; extra == 'speedups'
==================================
Async http client/server framework
==================================
.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png
   :height: 64px
   :width: 64px
   :alt: aiohttp logo

|

.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
   :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
   :alt: GitHub Actions status for master branch

.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiohttp
   :alt: codecov.io status for master branch

.. image:: https://badge.fury.io/py/aiohttp.svg
   :target: https://pypi.org/project/aiohttp
   :alt: Latest PyPI package version

.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
   :target: https://docs.aiohttp.org/
   :alt: Latest Read The Docs

.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
   :target: https://aio-libs.discourse.group
   :alt: Discourse status

.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter
Key Features
============
- Supports both the client and server side of the HTTP protocol.
- Supports both client and server WebSockets out of the box and avoids
  callback hell.
- Provides a web server with middlewares and pluggable routing (a minimal
  middleware sketch is shown just below).
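
A minimal sketch of the middleware hook mentioned in the last item above
(the handler, header name, and route here are illustrative assumptions, not
part of the upstream examples):

.. code-block:: python

    from aiohttp import web

    @web.middleware
    async def add_server_header(request, handler):
        # run the downstream handler, then decorate its response
        response = await handler(request)
        response.headers["X-Served-By"] = "aiohttp"
        return response

    async def hello(request):
        return web.Response(text="Hello")

    # middlewares apply to every request routed by this application
    app = web.Application(middlewares=[add_server_header])
    app.add_routes([web.get("/", hello)])

    if __name__ == "__main__":
        web.run_app(app)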
Getting started
===============
Client
------
To get something from the web:
.. code-block:: python

    import aiohttp
    import asyncio

    async def main():

        async with aiohttp.ClientSession() as session:
            async with session.get('http://python.org') as response:

                print("Status:", response.status)
                print("Content-type:", response.headers['content-type'])

                html = await response.text()
                print("Body:", html[:15], "...")

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

This prints:

.. code-block::

    Status: 200
    Content-type: text/html; charset=utf-8
    Body: <!doctype html> ...
Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
Server
------
An example using a simple server:
.. code-block:: python

    # examples/server_simple.py
    from aiohttp import web

    async def handle(request):
        name = request.match_info.get('name', "Anonymous")
        text = "Hello, " + name
        return web.Response(text=text)

    async def wshandle(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        async for msg in ws:
            if msg.type == web.WSMsgType.text:
                await ws.send_str("Hello, {}".format(msg.data))
            elif msg.type == web.WSMsgType.binary:
                await ws.send_bytes(msg.data)
            elif msg.type == web.WSMsgType.close:
                break

        return ws

    app = web.Application()
    app.add_routes([web.get('/', handle),
                    web.get('/echo', wshandle),
                    web.get('/{name}', handle)])

    if __name__ == '__main__':
        web.run_app(app)
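
The feature list above also mentions client-side WebSocket support; a minimal
client sketch for the ``/echo`` route of the server example might look like the
following (the URL assumes the example server is running locally on the default
``web.run_app`` port 8080, and the message text is an arbitrary placeholder):

.. code-block:: python

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            # connect to the /echo WebSocket route served by the example above
            async with session.ws_connect("http://localhost:8080/echo") as ws:
                await ws.send_str("ping")
                reply = await ws.receive()
                print("Server replied:", reply.data)

    asyncio.get_event_loop().run_until_complete(main())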
Documentation
=============
https://aiohttp.readthedocs.io/
Demos
=====
https://github.com/aio-libs/aiohttp-demos
External links
==============
* `Third party libraries
<http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
* `Built with aiohttp
<http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
* `Powered by aiohttp
<http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
*aio-libs discourse group*: https://aio-libs.discourse.group
*gitter chat* https://gitter.im/aio-libs/Lobby
We support `Stack Overflow
<https://stackoverflow.com/questions/tagged/aiohttp>`_.
Please add *aiohttp* tag to your question there.
Requirements
============
- Python >= 3.6
- async-timeout_
- attrs_
- chardet_
- multidict_
- yarl_
Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for the sake of speed).
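As a hedged aside grounded in the package metadata above (``Provides-Extra: speedups``),
the same optional accelerators can be pulled in together through that extra,
e.g. ``pip install aiohttp[speedups]``.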
.. _chardet: https://pypi.python.org/pypi/chardet
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
``aiohttp`` is offered under the Apache 2 license.
Keepsafe
========
The aiohttp community would like to thank Keepsafe
(https://www.getkeepsafe.com) for its support in the early days of
the project.
Source code
===========
The latest developer version is available in a GitHub repository:
https://github.com/aio-libs/aiohttp
Benchmarks
==========
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
=========
Changelog
=========
..
You should *NOT* be adding new change log entries to this file, this
file is managed by towncrier. You *may* edit previous change logs to
fix problems like typo corrections or such.
To add a new change log entry, please see
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
we named the news folder "changes".
WARNING: Don't drop the next directive!
.. towncrier release notes start
3.7.4.post0 (2021-03-06)
========================
Misc
----
- Bumped upper bound of the ``chardet`` runtime dependency
to allow their v4.0 version stream.
`#5366 <https://github.com/aio-libs/aiohttp/issues/5366>`_
----
3.7.4 (2021-02-25)
==================
Bugfixes
--------
- **(SECURITY BUG)** Started preventing open redirects in the
``aiohttp.web.normalize_path_middleware`` middleware. For
more details, see
https://github.com/aio-libs/aiohttp/security/advisories/GHSA-v6wp-4m6f-gcjg.
Thanks to `Beast Glatisant <https://github.com/g147>`__ for
finding the first instance of this issue and `Jelmer Vernooij
<https://jelmer.uk/>`__ for reporting and tracking it down
in aiohttp.
`#5497 <https://github.com/aio-libs/aiohttp/issues/5497>`_
- Fixed a difference in how the pure-Python and the Cython-based
HTTP parsers construct a ``yarl.URL`` object for the HTTP request-target.
Before this fix, the Python parser would turn the URI's absolute-path
for ``//some-path`` into ``/`` while the Cython code preserved it as
``//some-path``. Now, both do the latter.
`#5498 <https://github.com/aio-libs/aiohttp/issues/5498>`_
----
3.7.3 (2020-11-18)
==================
Features
--------
- Use Brotli instead of brotlipy
`#3803 <https://github.com/aio-libs/aiohttp/issues/3803>`_
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix ``web_middlewares.normalize_path_middleware`` behavior for patch without slash.
`#3669 <https://github.com/aio-libs/aiohttp/issues/3669>`_
- Fix overshadowing of overlapped sub-applications prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Make `BaseConnector.close()` a coroutine and wait until the client closes all connections. Drop deprecated "with Connector():" syntax.
`#3736 <https://github.com/aio-libs/aiohttp/issues/3736>`_
- Reset the ``sock_read`` timeout each time data is received for a ``aiohttp.client`` response.
`#3808 <https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of View
`#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Fixed querying the address families from DNS that the current host supports.
`#5156 <https://github.com/aio-libs/aiohttp/issues/5156>`_
- Change return type of MultipartReader.__aiter__() and BodyPartReader.__aiter__() to AsyncIterator.
`#5163 <https://github.com/aio-libs/aiohttp/issues/5163>`_
- Provide x86 Windows wheels.
`#5230 <https://github.com/aio-libs/aiohttp/issues/5230>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.FileResponse``.
`#3958 <https://github.com/aio-libs/aiohttp/issues/3958>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
- Fixed wrong "Usage" docstring of ``aiohttp.client.request``.
`#4603 <https://github.com/aio-libs/aiohttp/issues/4603>`_
- Add aiohttp-pydantic to third party libraries
`#5228 <https://github.com/aio-libs/aiohttp/issues/5228>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.7.2 (2020-10-27)
==================
Bugfixes
--------
- Fixed static files handling for loops without ``.sendfile()`` support
`#5149 <https://github.com/aio-libs/aiohttp/issues/5149>`_
----
3.7.1 (2020-10-25)
==================
Bugfixes
--------
- Fixed a type error caused by the conditional import of `Protocol`.
`#5111 <https://github.com/aio-libs/aiohttp/issues/5111>`_
- Server doesn't send Content-Length for 1xx or 204
`#4901 <https://github.com/aio-libs/aiohttp/issues/4901>`_
- Fix run_app typing
`#4957 <https://github.com/aio-libs/aiohttp/issues/4957>`_
- Always require ``typing_extensions`` library.
`#5107 <https://github.com/aio-libs/aiohttp/issues/5107>`_
- Fix a variable-shadowing bug causing `ThreadedResolver.resolve` to
return the resolved IP as the ``hostname`` in each record, which prevented
validation of HTTPS connections.
`#5110 <https://github.com/aio-libs/aiohttp/issues/5110>`_
- Added annotations to all public attributes.
`#5115 <https://github.com/aio-libs/aiohttp/issues/5115>`_
- Fix flaky test_when_timeout_smaller_second
`#5116 <https://github.com/aio-libs/aiohttp/issues/5116>`_
- Ensure sending a zero byte file does not throw an exception
`#5124 <https://github.com/aio-libs/aiohttp/issues/5124>`_
- Fix a bug in ``web.run_app()`` about Python version checking on Windows
`#5127 <https://github.com/aio-libs/aiohttp/issues/5127>`_
----
3.7.0 (2020-10-24)
==================
Features
--------
- Response headers are now prepared prior to running ``on_response_prepare`` hooks, directly before headers are sent to the client.
`#1958 <https://github.com/aio-libs/aiohttp/issues/1958>`_
- Add a ``quote_cookie`` option to ``CookieJar``, a way to skip quotation wrapping of cookies containing special characters.
`#2571 <https://github.com/aio-libs/aiohttp/issues/2571>`_
- Call ``AccessLogger.log`` with the current exception available from ``sys.exc_info()``.
`#3557 <https://github.com/aio-libs/aiohttp/issues/3557>`_
- `web.UrlDispatcher.add_routes` and `web.Application.add_routes` return a list
of registered `AbstractRoute` instances. `AbstractRouteDef.register` (and all
subclasses) returns a list of the registered resources.
`#3866 <https://github.com/aio-libs/aiohttp/issues/3866>`_
- Added properties of default ClientSession params to ClientSession class so it is available for introspection
`#3882 <https://github.com/aio-libs/aiohttp/issues/3882>`_
- Don't cancel web handler on peer disconnection, raise `OSError` on reading/writing instead.
`#4080 <https://github.com/aio-libs/aiohttp/issues/4080>`_
- Implement BaseRequest.get_extra_info() to access a protocol transports' extra info.
`#4189 <https://github.com/aio-libs/aiohttp/issues/4189>`_
- Added `ClientSession.timeout` property.
`#4191 <https://github.com/aio-libs/aiohttp/issues/4191>`_
- allow use of SameSite in cookies.
`#4224 <https://github.com/aio-libs/aiohttp/issues/4224>`_
- Use ``loop.sendfile()`` instead of custom implementation if available.
`#4269 <https://github.com/aio-libs/aiohttp/issues/4269>`_
- Apply SO_REUSEADDR to test server's socket.
`#4393 <https://github.com/aio-libs/aiohttp/issues/4393>`_
- Use .raw_host instead of slower .host in client API
`#4402 <https://github.com/aio-libs/aiohttp/issues/4402>`_
- Allow configuring the buffer size of input stream by passing ``read_bufsize`` argument.
`#4453 <https://github.com/aio-libs/aiohttp/issues/4453>`_
- Pass tests on Python 3.8 for Windows.
`#4513 <https://github.com/aio-libs/aiohttp/issues/4513>`_
- Add `method` and `url` attributes to `TraceRequestChunkSentParams` and `TraceResponseChunkReceivedParams`.
`#4674 <https://github.com/aio-libs/aiohttp/issues/4674>`_
- Add ClientResponse.ok property for checking status code under 400.
`#4711 <https://github.com/aio-libs/aiohttp/issues/4711>`_
- Don't ceil timeouts that are smaller than 5 seconds.
`#4850 <https://github.com/aio-libs/aiohttp/issues/4850>`_
- TCPSite now listens by default on all interfaces instead of just IPv4 when `None` is passed in as the host.
`#4894 <https://github.com/aio-libs/aiohttp/issues/4894>`_
- Bump ``http_parser`` to 2.9.4
`#5070 <https://github.com/aio-libs/aiohttp/issues/5070>`_
Bugfixes
--------
- Fix keepalive connections not being closed in time
`#3296 <https://github.com/aio-libs/aiohttp/issues/3296>`_
- Fix failed websocket handshake leaving connection hanging.
`#3380 <https://github.com/aio-libs/aiohttp/issues/3380>`_
- Fix tasks cancellation order on exit. The run_app task needs to be cancelled first for cleanup hooks to run with all tasks intact.
`#3805 <https://github.com/aio-libs/aiohttp/issues/3805>`_
- Don't start heartbeat until _writer is set
`#4062 <https://github.com/aio-libs/aiohttp/issues/4062>`_
- Fix handling of multipart file uploads without a content type.
`#4089 <https://github.com/aio-libs/aiohttp/issues/4089>`_
- Preserve view handler function attributes across middlewares
`#4174 <https://github.com/aio-libs/aiohttp/issues/4174>`_
- Fix the string representation of ``ServerDisconnectedError``.
`#4175 <https://github.com/aio-libs/aiohttp/issues/4175>`_
- Raise ``RuntimeError`` when trying to get the encoding from a body that has not been read.
`#4214 <https://github.com/aio-libs/aiohttp/issues/4214>`_
- Remove warning messages from noop.
`#4282 <https://github.com/aio-libs/aiohttp/issues/4282>`_
- Raise ClientPayloadError if FormData re-processed.
`#4345 <https://github.com/aio-libs/aiohttp/issues/4345>`_
- Fix a warning about unfinished task in ``web_protocol.py``
`#4408 <https://github.com/aio-libs/aiohttp/issues/4408>`_
- Fixed 'deflate' compression to comply with RFC 2616.
`#4506 <https://github.com/aio-libs/aiohttp/issues/4506>`_
- Fixed OverflowError on platforms with 32-bit time_t
`#4515 <https://github.com/aio-libs/aiohttp/issues/4515>`_
- Fixed ``request.body_exists`` returning the wrong value for methods without a body.
`#4528 <https://github.com/aio-libs/aiohttp/issues/4528>`_
- Fix connecting to link-local IPv6 addresses.
`#4554 <https://github.com/aio-libs/aiohttp/issues/4554>`_
- Fix a problem with connection waiters that are never awaited.
`#4562 <https://github.com/aio-libs/aiohttp/issues/4562>`_
- Always make sure the transport is not closing before reusing a connection.
Reusing a protocol based on the keepalive header is unreliable.
For example, uWSGI does not support keepalive even when it serves an
HTTP/1.1 request, unless it is explicitly configured with the
``--http-keepalive`` option.
Servers designed like uWSGI could cause aiohttp to intermittently
raise a ``ConnectionResetError`` when the protocol pool runs
out and a protocol is reused.
`#4587 <https://github.com/aio-libs/aiohttp/issues/4587>`_
- Handle the last CRLF correctly even if it is received via separate TCP segment.
`#4630 <https://github.com/aio-libs/aiohttp/issues/4630>`_
- Fix the register_resource function to validate route name before splitting it so that route name can include python keywords.
`#4691 <https://github.com/aio-libs/aiohttp/issues/4691>`_
- Improve typing annotations for ``web.Request``, ``aiohttp.ClientResponse`` and
``multipart`` module.
`#4736 <https://github.com/aio-libs/aiohttp/issues/4736>`_
- Fix the resolver task not being awaited when the connector is cancelled
`#4795 <https://github.com/aio-libs/aiohttp/issues/4795>`_
- Fix a bug "Aiohttp doesn't return any error on invalid request methods"
`#4798 <https://github.com/aio-libs/aiohttp/issues/4798>`_
- Fix HEAD requests for static content.
`#4809 <https://github.com/aio-libs/aiohttp/issues/4809>`_
- Fix incorrect size calculation for memoryview
`#4890 <https://github.com/aio-libs/aiohttp/issues/4890>`_
- Add HTTPMove to _all__.
`#4897 <https://github.com/aio-libs/aiohttp/issues/4897>`_
- Fixed the type annotations in the ``tracing`` module.
`#4912 <https://github.com/aio-libs/aiohttp/issues/4912>`_
- Fix typing for multipart ``__aiter__``.
`#4931 <https://github.com/aio-libs/aiohttp/issues/4931>`_
- Fix for race condition on connections in BaseConnector that leads to exceeding the connection limit.
`#4936 <https://github.com/aio-libs/aiohttp/issues/4936>`_
- Add forced UTF-8 encoding for ``application/rdap+json`` responses.
`#4938 <https://github.com/aio-libs/aiohttp/issues/4938>`_
- Fix inconsistency between Python and C http request parsers in parsing pct-encoded URL.
`#4972 <https://github.com/aio-libs/aiohttp/issues/4972>`_
- Fix connection closing issue in HEAD request.
`#5012 <https://github.com/aio-libs/aiohttp/issues/5012>`_
- Fix type hint on BaseRunner.addresses (from ``List[str]`` to ``List[Any]``)
`#5086 <https://github.com/aio-libs/aiohttp/issues/5086>`_
- Make `web.run_app()` more responsive to Ctrl+C on Windows for Python < 3.8. It slightly
increases CPU load as a side effect.
`#5098 <https://github.com/aio-libs/aiohttp/issues/5098>`_
Improved Documentation
----------------------
- Fix example code in client quick-start
`#3376 <https://github.com/aio-libs/aiohttp/issues/3376>`_
- Updated the docs so there is no contradiction in ``ttl_dns_cache`` default value
`#3512 <https://github.com/aio-libs/aiohttp/issues/3512>`_
- Add 'Deploy with SSL' to docs.
`#4201 <https://github.com/aio-libs/aiohttp/issues/4201>`_
- Change typing of the secure argument on StreamResponse.set_cookie from ``Optional[str]`` to ``Optional[bool]``
`#4204 <https://github.com/aio-libs/aiohttp/issues/4204>`_
- Changes ``ttl_dns_cache`` type from int to Optional[int].
`#4270 <https://github.com/aio-libs/aiohttp/issues/4270>`_
- Simplify README hello word example and add a documentation page for people coming from requests.
`#4272 <https://github.com/aio-libs/aiohttp/issues/4272>`_
- Improve some code examples in the documentation involving websockets and starting a simple HTTP site with an AppRunner.
`#4285 <https://github.com/aio-libs/aiohttp/issues/4285>`_
- Fix typo in code example in Multipart docs
`#4312 <https://github.com/aio-libs/aiohttp/issues/4312>`_
- Fix code example in Multipart section.
`#4314 <https://github.com/aio-libs/aiohttp/issues/4314>`_
- Update contributing guide so new contributors read the most recent version of that guide. Update command used to create test coverage reporting.
`#4810 <https://github.com/aio-libs/aiohttp/issues/4810>`_
- Spelling: Change "canonize" to "canonicalize".
`#4986 <https://github.com/aio-libs/aiohttp/issues/4986>`_
- Add ``aiohttp-sse-client`` library to third party usage list.
`#5084 <https://github.com/aio-libs/aiohttp/issues/5084>`_
Misc
----
- `#2856 <https://github.com/aio-libs/aiohttp/issues/2856>`_, `#4218 <https://github.com/aio-libs/aiohttp/issues/4218>`_, `#4250 <https://github.com/aio-libs/aiohttp/issues/4250>`_
----
3.6.3 (2020-10-12)
==================
Bugfixes
--------
- Pin yarl to ``<1.6.0`` to avoid buggy behavior that will be fixed by the next aiohttp
release.
3.6.2 (2019-10-09)
==================
Features
--------
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
Bugfixes
--------
- Reset the ``sock_read`` timeout each time data is received for a
``aiohttp.ClientResponse``. `#3808
<https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fix handling of expired cookies so they are not stored in CookieJar.
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
- Fix misleading message in the string representation of ``ClientConnectorError``;
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
<https://github.com/aio-libs/aiohttp/issues/4097>`_
- Don't clobber HTTP status when using FileResponse.
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
Improved Documentation
----------------------
- Added minimal required logging configuration to logging documentation.
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
- Update docs to reflect proxy support.
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
- Fix typo in code example in testing docs.
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.6.1 (2019-09-19)
==================
Features
--------
- Compatibility with Python 3.8.
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
Bugfixes
--------
- Corrected some exception string formatting.
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
unavailable because the runtime is built against
an outdated OpenSSL.
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
- Update multidict requirement to >= 4.5
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
Improved Documentation
----------------------
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
----
3.6.0 (2019-09-06)
==================
Features
--------
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
Proactor event loop to work. `#3629
<https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
compatible with more http proxy servers. `#3798
<https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use 308 redirect instead of 301.
This behavior should prevent clients from being unable to use PUT/POST
methods on endpoints that are redirected because of a trailing slash.
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
task with unhandled exception when the server is used in conjunction with
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
by mypy in files that references multipart.py.
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Cleanup per-chunk data in generic data read. Memory leak fixed.
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
transport" exception when clients disconnect uncleanly.
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
``asyncio.ClientSession.request`` according to the docs `#3852
<https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
Improved Documentation
----------------------
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- use ``if __name__ == '__main__':`` in server examples.
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
----
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()``, which used to return only
partial content in the case of compressed content.
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
``access_log=True`` and the event loop being in debug mode. `#3504
<https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
with files asynchronously. I/O based payloads from ``payload.py`` use a
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
<https://github.com/aio-libs/aiohttp/issues/3313>`_
- Render Internal Server Errors as plain text if the browser does not support HTML.
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts' headers on write. Refactor how
``Payload.headers`` are handled. Payload instances now always have headers and
Content-Type defined. Fix Payload Content-Disposition header reset after initial
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
This allows Python 3.7 context vars to be used smoothly.
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
avoid a deprecation warning. `#3480
<https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancelling pending activities on ``web.run_app`` finalization.
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for an individual request (`#2387
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
to run in a background executor to avoid blocking the main thread and potentially
triggering health check failures. (`#3205
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add a default logging handler to ``web.run_app``. If the ``Application.debug`` flag is set
and the default logger ``aiohttp.access`` is used, access logs will now be output
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add a ``method`` argument to ``session.ws_connect()``. Sometimes a server API requires a
different HTTP method for WebSocket connection establishment; for example, ``Docker
exec`` needs POST (see the sketch after this list). (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
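A rough sketch tying together a few of the client-side features above (per-request
cookies, ``ClientTimeout`` on ``aiohttp.request``, and the ``method`` argument to
``ws_connect``); the URLs are placeholders::

    import asyncio
    import aiohttp

    async def main() -> None:
        # Per-request cookies (#2387) and a per-call ClientTimeout (#3213).
        timeout = aiohttp.ClientTimeout(total=10)
        async with aiohttp.request("GET", "http://example.com/",
                                   cookies={"token": "abc"},
                                   timeout=timeout) as resp:
            print(resp.status)

        # ws_connect() with an explicit HTTP method (#3378), e.g. for Docker exec.
        async with aiohttp.ClientSession() as session:
            async with session.ws_connect("http://example.com/ws",
                                          method="POST") as ws:
                await ws.send_str("ping")

    asyncio.run(main())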
Bugfixes
--------
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure the Host header is added first to ClientRequest to better replicate browser behavior (`#3265
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
collections module will not be supported anymore. (`#3273
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Preserve the query string when redirecting via ``normalize_path_middleware``. (`#3278
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecate use of a boolean in ``resp.enable_compression()`` (`#3318
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close; use ``async with connector:`` and ``await
connector.close()`` instead (see the sketch after this list). (`#3417
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
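For the connector deprecation above, the recommended lifecycle looks roughly like
this (the connector arguments are placeholders)::

    import asyncio
    import aiohttp

    async def main() -> None:
        connector = aiohttp.TCPConnector(limit=10)
        async with connector:  # closes the connector on exit
            session = aiohttp.ClientSession(connector=connector,
                                            connector_owner=False)
            async with session:
                async with session.get("http://example.com/") as resp:
                    print(resp.status)
        # ...or, without a context manager: await connector.close()

    asyncio.run(main())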
Misc
----
- #3341, #3351

View File

@ -0,0 +1,136 @@
aiohttp-3.7.4.post0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
aiohttp-3.7.4.post0.dist-info/LICENSE.txt,sha256=gAD9PvGacMFN9xr1XVlZnYpL_ucI6iguio_9zKsMv88,11522
aiohttp-3.7.4.post0.dist-info/METADATA,sha256=ZF35_2WGQmQSkWbZ9iHwYLbq61rWAzpVTk0FNlVNy84,38836
aiohttp-3.7.4.post0.dist-info/RECORD,,
aiohttp-3.7.4.post0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
aiohttp-3.7.4.post0.dist-info/WHEEL,sha256=epucrC2yyYTysDCMzXuz8eGMTMKryzRfNOvMGdslbjc,101
aiohttp-3.7.4.post0.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
aiohttp/.hash/_cparser.pxd.hash,sha256=IrReMM-DHmX3hUqt6ZkWbHjAmBEFqWvWTpe0X5gwSXo,108
aiohttp/.hash/_find_header.pxd.hash,sha256=TxG5w4etbVd6sfm5JWbdf5PW6LnuXRQnlMoFBVGKN2E,112
aiohttp/.hash/_frozenlist.pyx.hash,sha256=UBmgbFYXCyTd4DwRcYZY1SBqTU3_IUK2Rmlw56PvtnI,111
aiohttp/.hash/_helpers.pyi.hash,sha256=D1pTrCkUaJ3by1XeGH_nE-amt7XdjfRHcm9oRtoGhHQ,108
aiohttp/.hash/_helpers.pyx.hash,sha256=MA4zlNd5xukP4VDAbnoId0Azv8HxCpwLWie2gSMPLsw,108
aiohttp/.hash/_http_parser.pyx.hash,sha256=LAHg2wAi0_2KUaH9DV1UJQr2jxgZVrHIJk2TIDro9bo,112
aiohttp/.hash/_http_writer.pyx.hash,sha256=S68YR2hVoBRgQzI7YCAM1SnlUWr4fOSr16FkcS1-H1k,112
aiohttp/.hash/_websocket.pyx.hash,sha256=8AcsJ5Tb8lZ9_QVXor_1Xbtl5igK1iP5rtEZZ0iA2AE,110
aiohttp/.hash/frozenlist.pyi.hash,sha256=9Xim5smJMiLGey1D0-BUiLxHs1XaV2_aYKAv7eQ7M_4,110
aiohttp/.hash/hdrs.py.hash,sha256=yDL4bbjH3uQidHSTCQkAArTdZgQgLidoaXB0HkaWhS0,103
aiohttp/.hash/signals.pyi.hash,sha256=kHUKGkyP9XjurohZ39KYUw2W2FEmUuTDp7MCF9AZPus,107
aiohttp/__init__.py,sha256=UmLziO7Qi_M22n8tfE4cL-qMCbN5ZodS3ADxSg5ri-M,7157
aiohttp/__pycache__/__init__.cpython-36.pyc,,
aiohttp/__pycache__/abc.cpython-36.pyc,,
aiohttp/__pycache__/base_protocol.cpython-36.pyc,,
aiohttp/__pycache__/client.cpython-36.pyc,,
aiohttp/__pycache__/client_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/client_proto.cpython-36.pyc,,
aiohttp/__pycache__/client_reqrep.cpython-36.pyc,,
aiohttp/__pycache__/client_ws.cpython-36.pyc,,
aiohttp/__pycache__/connector.cpython-36.pyc,,
aiohttp/__pycache__/cookiejar.cpython-36.pyc,,
aiohttp/__pycache__/formdata.cpython-36.pyc,,
aiohttp/__pycache__/frozenlist.cpython-36.pyc,,
aiohttp/__pycache__/hdrs.cpython-36.pyc,,
aiohttp/__pycache__/helpers.cpython-36.pyc,,
aiohttp/__pycache__/http.cpython-36.pyc,,
aiohttp/__pycache__/http_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/http_parser.cpython-36.pyc,,
aiohttp/__pycache__/http_websocket.cpython-36.pyc,,
aiohttp/__pycache__/http_writer.cpython-36.pyc,,
aiohttp/__pycache__/locks.cpython-36.pyc,,
aiohttp/__pycache__/log.cpython-36.pyc,,
aiohttp/__pycache__/multipart.cpython-36.pyc,,
aiohttp/__pycache__/payload.cpython-36.pyc,,
aiohttp/__pycache__/payload_streamer.cpython-36.pyc,,
aiohttp/__pycache__/pytest_plugin.cpython-36.pyc,,
aiohttp/__pycache__/resolver.cpython-36.pyc,,
aiohttp/__pycache__/signals.cpython-36.pyc,,
aiohttp/__pycache__/streams.cpython-36.pyc,,
aiohttp/__pycache__/tcp_helpers.cpython-36.pyc,,
aiohttp/__pycache__/test_utils.cpython-36.pyc,,
aiohttp/__pycache__/tracing.cpython-36.pyc,,
aiohttp/__pycache__/typedefs.cpython-36.pyc,,
aiohttp/__pycache__/web.cpython-36.pyc,,
aiohttp/__pycache__/web_app.cpython-36.pyc,,
aiohttp/__pycache__/web_exceptions.cpython-36.pyc,,
aiohttp/__pycache__/web_fileresponse.cpython-36.pyc,,
aiohttp/__pycache__/web_log.cpython-36.pyc,,
aiohttp/__pycache__/web_middlewares.cpython-36.pyc,,
aiohttp/__pycache__/web_protocol.cpython-36.pyc,,
aiohttp/__pycache__/web_request.cpython-36.pyc,,
aiohttp/__pycache__/web_response.cpython-36.pyc,,
aiohttp/__pycache__/web_routedef.cpython-36.pyc,,
aiohttp/__pycache__/web_runner.cpython-36.pyc,,
aiohttp/__pycache__/web_server.cpython-36.pyc,,
aiohttp/__pycache__/web_urldispatcher.cpython-36.pyc,,
aiohttp/__pycache__/web_ws.cpython-36.pyc,,
aiohttp/__pycache__/worker.cpython-36.pyc,,
aiohttp/_cparser.pxd,sha256=xvsLl13ZXXyHGyb2Us7WsLncndQrxhyGB4KXnvbsRtQ,4099
aiohttp/_find_header.c,sha256=-d1A3pkkpirVX5CDQaTSSTjdjXekmOjt-bqYcEQWbXc,197440
aiohttp/_find_header.h,sha256=HistyxY7K3xEJ53Y5xEfwrDVDkfcV0zQ9mkzMgzi_jo,184
aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
aiohttp/_frozenlist.c,sha256=Ea69NSVskCydZGMWM-nZK7ejOITPd1_4RKima-Al9ng,294194
aiohttp/_frozenlist.cp36-win_amd64.pyd,sha256=vlRj-Ve5bqzDpXzoJgr9OiajGs-PdxSdhNJxuCT71WE,65024
aiohttp/_frozenlist.pyx,sha256=SB851KmtWpiJ2ZB05Tpo4855VkCyRtgMs843Wz8kFeg,2713
aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
aiohttp/_helpers.c,sha256=JzeMvzUU5gUPfsUMoaeetvhME5i45bzOEC1bVUDAYn4,211990
aiohttp/_helpers.cp36-win_amd64.pyd,sha256=xEAvgJcVHYJFjnWlj2mYh7bGGzf27ajtVrqLZNl-Vwg,48640
aiohttp/_helpers.pyi,sha256=2Hd5IC0Zf4YTEJ412suyyhsh1kVyVDv5g4stgyo2Ksc,208
aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084
aiohttp/_http_parser.c,sha256=cxhXBsnSqgk8XgURzub_XFLbJSKJJO248-2vWvkT0wM,1011527
aiohttp/_http_parser.cp36-win_amd64.pyd,sha256=RB22hrb2Bu9zEbKp8MHUKbCr3QY2BU7uAlvbWKjFszM,237056
aiohttp/_http_parser.pyx,sha256=g8BRhSJK1X8TP3_V1WwzH0-eEBzVL5EjfntlaLVFnhw,29897
aiohttp/_http_writer.c,sha256=-xrk3WfKaYIGAwywdKrAQL1zYcVKV8L196WgulT04Cw,213022
aiohttp/_http_writer.cp36-win_amd64.pyd,sha256=cxHyPqPbO60DuwPOY99M0pZt_8pa0oS-nuPnbEsMhWQ,41984
aiohttp/_http_writer.pyx,sha256=rBzbk-xrIWO2hD0kKo5ILKSKsW_U8Xf15IAPnqSH23Q,4351
aiohttp/_websocket.c,sha256=t8Re9DbCbALSyrRrpnlXcSF39NxIebH4aYzysWDBQns,137429
aiohttp/_websocket.cp36-win_amd64.pyd,sha256=3RbKatQZ8wXyEosVrgIySZg_YT_zNTNcRUoX6IFYlaQ,28160
aiohttp/_websocket.pyx,sha256=o9J7yi9c2-jTBjE3dUkXxhDWKvRWJz5GZfyLsgJQa38,1617
aiohttp/abc.py,sha256=m5MSBBYS0fs4Kb0yROJrqufmupgnRMeLoJOcVylt_gQ,5447
aiohttp/base_protocol.py,sha256=BqQYyyTSwLjYtWe8pOCpvsjrlbovITd9rrZ5MMu8P8Q,2788
aiohttp/client.py,sha256=SY6_RXgfbeHDb63bccs-03a0Jj5f5RCBl5qpnYbsD6I,45191
aiohttp/client_exceptions.py,sha256=P0gmFGv4FAGiKeDCX6qtL_C-DlqsjwQ3HPI32oxFg34,8846
aiohttp/client_proto.py,sha256=3U2TI6gfoTRIzWZpknV4jGqO-fUQ_hJM-tKKQHWkATQ,8414
aiohttp/client_reqrep.py,sha256=1IlPotOVSIVqD_G2qALE1ChtB963nZICD8-CFqB00UY,37566
aiohttp/client_ws.py,sha256=pBTFy5Ss8iECE_4Cq5m86VZpDFOEZhlVeCtc7SeMopo,10588
aiohttp/connector.py,sha256=rMYFC4kaobHUSHmatR5GZyoJZ64R5uJx22QpGIWdOpA,44230
aiohttp/cookiejar.py,sha256=LluB0A_imJ19d7phW6cVuQMyZVz_Rq4RZ1_N09L3_ws,12545
aiohttp/formdata.py,sha256=akyeuVTAdPvtUpiFeX759uerv69PQB4mchq3ZOdTaNo,6250
aiohttp/frozenlist.py,sha256=nJaNj0CP5QRHw7U8Fqq2bYzrLWlQhWqNN_XNvlWlHeY,1790
aiohttp/frozenlist.pyi,sha256=kBG9J61ymCqiUvBkrjsRGVmfakmkzk6KHmZbdgRLCZY,1480
aiohttp/hdrs.py,sha256=XyvcUDaIZe-HUoro_WggyLNWdyCcVDC2aciFer7bnpQ,3554
aiohttp/helpers.py,sha256=LW6EL9AtwvGVAxPWPAL1pbIt3PMaOy9OJoW4TewRc-M,23698
aiohttp/http.py,sha256=NKlSh1UEf-ZoYBYI0IoAUq0jy_-wKyJQ-aT0GjQCy7k,1896
aiohttp/http_exceptions.py,sha256=rLwhCbFrOpQ_ntr3GnxaxD3oRnTTNM1utmDDBUbdVTU,2691
aiohttp/http_parser.py,sha256=IleNVZ3FGYmk3hKXz1RGwCcsUl5oUjDbikuwMcoyZ-I,31682
aiohttp/http_websocket.py,sha256=unIzhQEnAGpeXESvvtmP7k59EiTTmoG1_rd5BPoYrqk,25796
aiohttp/http_writer.py,sha256=z2K59frCgLJGwKADU3q-VRgHLKLXNzq6MW1Geg9pea8,5523
aiohttp/locks.py,sha256=-ySdj_OPys4s1LvpSeFRl6XHgmcpVGbrNtuoKsZ-lXQ,1265
aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
aiohttp/multipart.py,sha256=P1erLT4m3Cj6j_6fO_dJhppntg4EocRRBGYCgptnCxc,33208
aiohttp/payload.py,sha256=m6RIJyZXIumdRpvGGJovdjsVcL--6GybxjpUyjKXGo4,13781
aiohttp/payload_streamer.py,sha256=avZCRjdpQU2t0HL5YeWs7JQaaUHe81YcyYxQzrECVLA,2176
aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
aiohttp/pytest_plugin.py,sha256=rJ8PLNtR6UhMwfD-BeLMeHdNQSWUhEpVh3nhdsAHghQ,11389
aiohttp/resolver.py,sha256=TyLUEe8QyxLCYy8jETJa-8MotQxmKfcEfg85wjv8_hs,4757
aiohttp/signals.py,sha256=HdX5hKj-w-jIVrYTKADRzXZh-2x26CwF4UKSQy9zMsg,886
aiohttp/signals.pyi,sha256=EnNobON7azFQ1fHSiklvbrvQfQUnOZPi3n_6pKEzXoM,331
aiohttp/streams.py,sha256=ZEEnFyTIecfzeJGcsxpD7LXo4bS93IpeQvep5NAXQsA,21177
aiohttp/tcp_helpers.py,sha256=jPHZyIHbIAqyWS0QShT_ZgKLMiDW7s_124IPc4irTU8,1000
aiohttp/test_utils.py,sha256=UgC_8I0WVE0LgFtKKHfLIBceySkVE7oCatuyj5-dQeg,20929
aiohttp/tracing.py,sha256=_oTwN_h8sj8seL0QfeTlWAaUhTe1yv8glc8Wutuitds,14805
aiohttp/typedefs.py,sha256=Am4eWH_C4lE_m3pl3IlfuB-KpqkjStTRMYcoS8cJMC4,1420
aiohttp/web.py,sha256=Zd6dGInVzbHlzIoYGwe86fdIFP3hDA_-FPevp_B356M,18462
aiohttp/web_app.py,sha256=IKHRp1PrRHJQYI-km_dhxJlchc89gWfI19pmC5rTK-o,17605
aiohttp/web_exceptions.py,sha256=ydzJJKwJWHOKzjzh0XtZNzZ5NCb0Me8DKmlKy2qvijw,10547
aiohttp/web_fileresponse.py,sha256=rFpMfXUcbfVjQkVAt_xtA6tbgrUVj7BJQCGZHBBtNI8,9268
aiohttp/web_log.py,sha256=B-gy5ixLtq6TVXvBzXuCLGf96GM4qeWjjNIiczVllLM,7706
aiohttp/web_middlewares.py,sha256=fP2Fp113O5keX-h4VD4rJTQUM_5Si4vXu1m-1fFNIpU,4314
aiohttp/web_protocol.py,sha256=6B0cUAuPTF6JTtRmLCKStqjdD8_rsKNKNeYeSOL5hD0,23918
aiohttp/web_request.py,sha256=7U-Rizv5L_srCVVjQaG4Mer3kbyifTt2LAgOnzFfQ6A,27278
aiohttp/web_response.py,sha256=8hzlkbHWHp7HXpVXV9jZF_Cq2aKl6X-0pkl_IfcZ7jg,26983
aiohttp/web_routedef.py,sha256=7gnG-KLvQPLDXxGUJXwOC3_k2ufqFWMAP8qofprQ79c,6324
aiohttp/web_runner.py,sha256=E4asEULIN8umMWCpmYhJ4eAH2SbjAStk0FMolhu-cZk,11575
aiohttp/web_server.py,sha256=iKc9a4fQS14-3ivqzBiBp742m8vEexRZiSzeKTW7NCo,2120
aiohttp/web_urldispatcher.py,sha256=ydsygCAoYnBvOL7qW4cspYKaTr0bEJjQVnUEQAZsb8g,40765
aiohttp/web_ws.py,sha256=5edpQhp6h135RZaHkVTDNFbr_qU9lcZf6iUefmBXcmg,17264
aiohttp/worker.py,sha256=YKvDyIcNrRROfhH-huN1EMoB31WbNnDTAh60Tu7hOxQ,8274

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp36-cp36m-win_amd64

View File

@ -0,0 +1 @@
aiohttp

View File

@ -0,0 +1 @@
c6fb0b975dd95d7c871b26f652ced6b0b9dc9dd42bc61c860782979ef6ec46d4 *D:/a/aiohttp/aiohttp/aiohttp/_cparser.pxd

View File

@ -0,0 +1 @@
0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 *D:/a/aiohttp/aiohttp/aiohttp/_find_header.pxd

View File

@ -0,0 +1 @@
481f39d4a9ad5a9889d99074e53a68e3ce795640b246d80cb3ce375b3f2415e8 *D:/a/aiohttp/aiohttp/aiohttp/_frozenlist.pyx

View File

@ -0,0 +1 @@
d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyi

View File

@ -0,0 +1 @@
b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138 *D:/a/aiohttp/aiohttp/aiohttp/_helpers.pyx

View File

@ -0,0 +1 @@
83c05185224ad57f133f7fd5d56c331f4f9e101cd52f91237e7b6568b5459e1c *D:/a/aiohttp/aiohttp/aiohttp/_http_parser.pyx

View File

@ -0,0 +1 @@
ac1cdb93ec6b2163b6843d242a8e482ca48ab16fd4f177f5e4800f9ea487db74 *D:/a/aiohttp/aiohttp/aiohttp/_http_writer.pyx

View File

@ -0,0 +1 @@
a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f *D:/a/aiohttp/aiohttp/aiohttp/_websocket.pyx

View File

@ -0,0 +1 @@
9011bd27ad72982aa252f064ae3b1119599f6a49a4ce4e8a1e665b76044b0996 *D:/a/aiohttp/aiohttp/aiohttp/frozenlist.pyi

View File

@ -0,0 +1 @@
5f2bdc50368865ef87528ae8fd6820c8b35677209c5430b669c8857abedb9e94 *D:/a/aiohttp/aiohttp/aiohttp/hdrs.py

View File

@ -0,0 +1 @@
1273686ce37b6b3150d5f1d28a496f6ebbd07d05273993e2de7ffaa4a1335e83 *D:/a/aiohttp/aiohttp/aiohttp/signals.pyi

View File

@ -0,0 +1 @@
pip

View File

@ -0,0 +1,26 @@
Except when otherwise stated (look for LICENSE files in directories or
information at the beginning of each file) all software and
documentation is licensed as follows:
The MIT License
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,37 @@
Metadata-Version: 2.1
Name: cffi
Version: 1.14.5
Summary: Foreign Function Interface for Python calling C code.
Home-page: http://cffi.readthedocs.org
Author: Armin Rigo, Maciej Fijalkowski
Author-email: python-cffi@googlegroups.com
License: MIT
Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: License :: OSI Approved :: MIT License
Requires-Dist: pycparser
CFFI
====
Foreign Function Interface for Python calling C code.
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
Contact
-------
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_

View File

@ -0,0 +1,44 @@
_cffi_backend.cp36-win_amd64.pyd,sha256=IBzxrMFcQqgDiJNXmijS9SVjkYXHZRYJ0L_ESdQ-wMU,181248
cffi-1.14.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cffi-1.14.5.dist-info/LICENSE,sha256=esEZUOct9bRcUXFqeyLnuzSzJNZ_Bl4pOBUt1HLEgV8,1320
cffi-1.14.5.dist-info/METADATA,sha256=9cQJcfX8MjM9nlAXlHcCe-YmRy7Ez9IsA3eSoOdYYWY,1191
cffi-1.14.5.dist-info/RECORD,,
cffi-1.14.5.dist-info/WHEEL,sha256=epucrC2yyYTysDCMzXuz8eGMTMKryzRfNOvMGdslbjc,101
cffi-1.14.5.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76
cffi-1.14.5.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
cffi/__init__.py,sha256=tRPXYJcFAHnje0nGIPZ2N_uY5iuQhVTOYb6prbrYZLQ,527
cffi/__pycache__/__init__.cpython-36.pyc,,
cffi/__pycache__/api.cpython-36.pyc,,
cffi/__pycache__/backend_ctypes.cpython-36.pyc,,
cffi/__pycache__/cffi_opcode.cpython-36.pyc,,
cffi/__pycache__/commontypes.cpython-36.pyc,,
cffi/__pycache__/cparser.cpython-36.pyc,,
cffi/__pycache__/error.cpython-36.pyc,,
cffi/__pycache__/ffiplatform.cpython-36.pyc,,
cffi/__pycache__/lock.cpython-36.pyc,,
cffi/__pycache__/model.cpython-36.pyc,,
cffi/__pycache__/pkgconfig.cpython-36.pyc,,
cffi/__pycache__/recompiler.cpython-36.pyc,,
cffi/__pycache__/setuptools_ext.cpython-36.pyc,,
cffi/__pycache__/vengine_cpy.cpython-36.pyc,,
cffi/__pycache__/vengine_gen.cpython-36.pyc,,
cffi/__pycache__/verifier.cpython-36.pyc,,
cffi/_cffi_errors.h,sha256=INd0GxZQna8TTRYNOOr9_iFy0FZa84I_KH1qlmPgulQ,4003
cffi/_cffi_include.h,sha256=H7cgdZR-POwmUFrIup4jOGzmje8YoQHhN99gVFg7w08,15185
cffi/_embedding.h,sha256=Mm-IuHEH8OGSCrK08NXaZvn6iL_636nZ5NSchyGh05A,18108
cffi/api.py,sha256=Xs_dAN5x1ehfnn_F9ZTdA3Ce0bmPrqeIOkO4Ya1tfbQ,43029
cffi/backend_ctypes.py,sha256=BHN3q2giL2_Y8wMDST2CIcc_qoMrs65qV9Ob5JvxBZ4,43575
cffi/cffi_opcode.py,sha256=57P2NHLZkuTWueZybu5iosWljb6ocQmUXzGrCplrnyE,5911
cffi/commontypes.py,sha256=mEZD4g0qtadnv6O6CEXvMQaJ1K6SRbG5S1h4YvVZHOU,2769
cffi/cparser.py,sha256=CwVk2V3ATYlCoywG6zN35w6UQ7zj2EWX68KjoJp2Mzk,45237
cffi/error.py,sha256=Bka7fSV22aIglTQDPIDfpnxTc1aWZLMQdQOJY-h_PUA,908
cffi/ffiplatform.py,sha256=qioydJeC63dEvrQ3ht5_BPmSs7wzzzuWnZAJtfhic7I,4173
cffi/lock.py,sha256=vnbsel7392Ib8gGBifIfAfc7MHteSwd3nP725pvc25Q,777
cffi/model.py,sha256=HRD0WEYHF2Vr6RjS-4wyncElrZxU2256zY0fbMkSKec,22385
cffi/parse_c_type.h,sha256=fKYNqWNX5f9kZNNhbXcRLTOlpRGRhh8eCLyHmTXIZnQ,6157
cffi/pkgconfig.py,sha256=9zDcDf0XKIJaxFHLg7e-W8-Xb8Yq5hdhqH7kLg-ugRo,4495
cffi/recompiler.py,sha256=LmEalHqs90dgp5od-BiZizsu2M2WJV7S6ctNSxj3FsA,66149
cffi/setuptools_ext.py,sha256=8y14TOlRAkgdczmwtPOahyFXJHNyIqhLjUHMYQmjOHs,9150
cffi/vengine_cpy.py,sha256=ukugKCIsURxJzHxlxS265tGjQfPTFDbThwsqBrwKh-A,44396
cffi/vengine_gen.py,sha256=mykUhLFJIcV6AyQ5cMJ3n_7dbqw0a9WEjXW0E-WfgiI,27359
cffi/verifier.py,sha256=La8rdbEkvdvbqAHDzTk5lsNUvdkqB_GcFnO7wXI6Mgk,11513

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp36-cp36m-win_amd64

View File

@ -0,0 +1,3 @@
[distutils.setup_keywords]
cffi_modules = cffi.setuptools_ext:cffi_modules

View File

@ -0,0 +1,2 @@
_cffi_backend
cffi

View File

@ -0,0 +1,14 @@
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
'FFIError']
from .api import FFI
from .error import CDefError, FFIError, VerificationError, VerificationMissing
from .error import PkgConfigError
__version__ = "1.14.5"
__version_info__ = (1, 14, 5)
# The verifier module file names are based on the CRC32 of a string that
# contains the following version number. It may be older than __version__
# if nothing is clearly incompatible.
__version_verifier_modules__ = "0.8.6"
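For context on the vendored package above, the classic ABI-mode use of ``cffi.FFI``
(mirroring the docstring in ``api.py`` further down) looks roughly like this;
``dlopen(None)`` loads the C standard library and only works on POSIX::

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("int printf(const char *format, ...);")
    C = ffi.dlopen(None)                 # the C standard library (POSIX only)
    arg = ffi.new("char[]", b"world")    # owned C buffer holding b"world\0"
    C.printf(b"hello, %s!\n", arg)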

View File

@ -0,0 +1,147 @@
#ifndef CFFI_MESSAGEBOX
# ifdef _MSC_VER
# define CFFI_MESSAGEBOX 1
# else
# define CFFI_MESSAGEBOX 0
# endif
#endif
#if CFFI_MESSAGEBOX
/* Windows only: logic to take the Python-CFFI embedding logic
initialization errors and display them in a background thread
with MessageBox. The idea is that if the whole program closes
as a result of this problem, then likely it is already a console
program and you can read the stderr output in the console too.
If it is not a console program, then it will likely show its own
dialog to complain, or generally not abruptly close, and for this
case the background thread should stay alive.
*/
static void *volatile _cffi_bootstrap_text;
static PyObject *_cffi_start_error_capture(void)
{
PyObject *result = NULL;
PyObject *x, *m, *bi;
if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
(void *)1, NULL) != NULL)
return (PyObject *)1;
m = PyImport_AddModule("_cffi_error_capture");
if (m == NULL)
goto error;
result = PyModule_GetDict(m);
if (result == NULL)
goto error;
#if PY_MAJOR_VERSION >= 3
bi = PyImport_ImportModule("builtins");
#else
bi = PyImport_ImportModule("__builtin__");
#endif
if (bi == NULL)
goto error;
PyDict_SetItemString(result, "__builtins__", bi);
Py_DECREF(bi);
x = PyRun_String(
"import sys\n"
"class FileLike:\n"
" def write(self, x):\n"
" try:\n"
" of.write(x)\n"
" except: pass\n"
" self.buf += x\n"
"fl = FileLike()\n"
"fl.buf = ''\n"
"of = sys.stderr\n"
"sys.stderr = fl\n"
"def done():\n"
" sys.stderr = of\n"
" return fl.buf\n", /* make sure the returned value stays alive */
Py_file_input,
result, result);
Py_XDECREF(x);
error:
if (PyErr_Occurred())
{
PyErr_WriteUnraisable(Py_None);
PyErr_Clear();
}
return result;
}
#pragma comment(lib, "user32.lib")
static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
{
Sleep(666); /* may be interrupted if the whole process is closing */
#if PY_MAJOR_VERSION >= 3
MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
L"Python-CFFI error",
MB_OK | MB_ICONERROR);
#else
MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
"Python-CFFI error",
MB_OK | MB_ICONERROR);
#endif
_cffi_bootstrap_text = NULL;
return 0;
}
static void _cffi_stop_error_capture(PyObject *ecap)
{
PyObject *s;
void *text;
if (ecap == (PyObject *)1)
return;
if (ecap == NULL)
goto error;
s = PyRun_String("done()", Py_eval_input, ecap, ecap);
if (s == NULL)
goto error;
/* Show a dialog box, but in a background thread, and
never show multiple dialog boxes at once. */
#if PY_MAJOR_VERSION >= 3
text = PyUnicode_AsWideCharString(s, NULL);
#else
text = PyString_AsString(s);
#endif
_cffi_bootstrap_text = text;
if (text != NULL)
{
HANDLE h;
h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
NULL, 0, NULL);
if (h != NULL)
CloseHandle(h);
}
/* decref the string, but it should stay alive as 'fl.buf'
in the small module above. It will really be freed only if
we later get another similar error. So it's a leak of at
most one copy of the small module. That's fine for this
situation which is usually a "fatal error" anyway. */
Py_DECREF(s);
PyErr_Clear();
return;
error:
_cffi_bootstrap_text = NULL;
PyErr_Clear();
}
#else
static PyObject *_cffi_start_error_capture(void) { return NULL; }
static void _cffi_stop_error_capture(PyObject *ecap) { }
#endif

View File

@ -0,0 +1,385 @@
#define _CFFI_
/* We try to define Py_LIMITED_API before including Python.h.
Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
Py_REF_DEBUG are not defined. This is a best-effort approximation:
we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
the same works for the other two macros. Py_DEBUG implies them,
but not the other way around.
The implementation is messy (issue #350): on Windows, with _MSC_VER,
we have to define Py_LIMITED_API even before including pyconfig.h.
In that case, we guess what pyconfig.h will do to the macros above,
and check our guess after the #include.
Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
version >= 16.0.0. With older versions of either, you don't get a
copy of PYTHON3.DLL in the virtualenv. We can't check the version of
CPython *before* we even include pyconfig.h. ffi.set_source() puts
a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
running on Windows < 3.5, as an attempt at fixing it, but that's
arguably wrong because it may not be the target version of Python.
Still better than nothing I guess. As another workaround, you can
remove the definition of Py_LIMITED_API here.
See also 'py_limited_api' in cffi/setuptools_ext.py.
*/
#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
# ifdef _MSC_VER
# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
# define Py_LIMITED_API
# endif
# include <pyconfig.h>
/* sanity-check: Py_LIMITED_API will cause crashes if any of these
are also defined. Normally, the Python file PC/pyconfig.h does not
cause any of these to be defined, with the exception that _DEBUG
causes Py_DEBUG. Double-check that. */
# ifdef Py_LIMITED_API
# if defined(Py_DEBUG)
# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
# endif
# if defined(Py_TRACE_REFS)
# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
# endif
# if defined(Py_REF_DEBUG)
# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
# endif
# endif
# else
# include <pyconfig.h>
# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
# define Py_LIMITED_API
# endif
# endif
#endif
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
#include <stddef.h>
#include "parse_c_type.h"
/* this block of #ifs should be kept exactly identical between
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
and cffi/_cffi_include.h */
#if defined(_MSC_VER)
# include <malloc.h> /* for alloca() */
# if _MSC_VER < 1600 /* MSVC < 2010 */
typedef __int8 int8_t;
typedef __int16 int16_t;
typedef __int32 int32_t;
typedef __int64 int64_t;
typedef unsigned __int8 uint8_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int64 uint64_t;
typedef __int8 int_least8_t;
typedef __int16 int_least16_t;
typedef __int32 int_least32_t;
typedef __int64 int_least64_t;
typedef unsigned __int8 uint_least8_t;
typedef unsigned __int16 uint_least16_t;
typedef unsigned __int32 uint_least32_t;
typedef unsigned __int64 uint_least64_t;
typedef __int8 int_fast8_t;
typedef __int16 int_fast16_t;
typedef __int32 int_fast32_t;
typedef __int64 int_fast64_t;
typedef unsigned __int8 uint_fast8_t;
typedef unsigned __int16 uint_fast16_t;
typedef unsigned __int32 uint_fast32_t;
typedef unsigned __int64 uint_fast64_t;
typedef __int64 intmax_t;
typedef unsigned __int64 uintmax_t;
# else
# include <stdint.h>
# endif
# if _MSC_VER < 1800 /* MSVC < 2013 */
# ifndef __cplusplus
typedef unsigned char _Bool;
# endif
# endif
#else
# include <stdint.h>
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
# include <alloca.h>
# endif
#endif
#ifdef __GNUC__
# define _CFFI_UNUSED_FN __attribute__((unused))
#else
# define _CFFI_UNUSED_FN /* nothing */
#endif
#ifdef __cplusplus
# ifndef _Bool
typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
# endif
#endif
/********** CPython-specific section **********/
#ifndef PYPY_VERSION
#if PY_MAJOR_VERSION >= 3
# define PyInt_FromLong PyLong_FromLong
#endif
#define _cffi_from_c_double PyFloat_FromDouble
#define _cffi_from_c_float PyFloat_FromDouble
#define _cffi_from_c_long PyInt_FromLong
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
#define _cffi_from_c_longlong PyLong_FromLongLong
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
#define _cffi_from_c__Bool PyBool_FromLong
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
#define _cffi_from_c_int(x, type) \
(((type)-1) > 0 ? /* unsigned */ \
(sizeof(type) < sizeof(long) ? \
PyInt_FromLong((long)x) : \
sizeof(type) == sizeof(long) ? \
PyLong_FromUnsignedLong((unsigned long)x) : \
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
(sizeof(type) <= sizeof(long) ? \
PyInt_FromLong((long)x) : \
PyLong_FromLongLong((long long)x)))
#define _cffi_to_c_int(o, type) \
((type)( \
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
: (type)_cffi_to_c_i8(o)) : \
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
: (type)_cffi_to_c_i16(o)) : \
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
: (type)_cffi_to_c_i32(o)) : \
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
: (type)_cffi_to_c_i64(o)) : \
(Py_FatalError("unsupported size for type " #type), (type)0)))
#define _cffi_to_c_i8 \
((int(*)(PyObject *))_cffi_exports[1])
#define _cffi_to_c_u8 \
((int(*)(PyObject *))_cffi_exports[2])
#define _cffi_to_c_i16 \
((int(*)(PyObject *))_cffi_exports[3])
#define _cffi_to_c_u16 \
((int(*)(PyObject *))_cffi_exports[4])
#define _cffi_to_c_i32 \
((int(*)(PyObject *))_cffi_exports[5])
#define _cffi_to_c_u32 \
((unsigned int(*)(PyObject *))_cffi_exports[6])
#define _cffi_to_c_i64 \
((long long(*)(PyObject *))_cffi_exports[7])
#define _cffi_to_c_u64 \
((unsigned long long(*)(PyObject *))_cffi_exports[8])
#define _cffi_to_c_char \
((int(*)(PyObject *))_cffi_exports[9])
#define _cffi_from_c_pointer \
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
#define _cffi_to_c_pointer \
((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
#define _cffi_get_struct_layout \
not used any more
#define _cffi_restore_errno \
((void(*)(void))_cffi_exports[13])
#define _cffi_save_errno \
((void(*)(void))_cffi_exports[14])
#define _cffi_from_c_char \
((PyObject *(*)(char))_cffi_exports[15])
#define _cffi_from_c_deref \
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
#define _cffi_to_c \
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
#define _cffi_from_c_struct \
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
#define _cffi_to_c_wchar_t \
((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
#define _cffi_from_c_wchar_t \
((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
#define _cffi_to_c_long_double \
((long double(*)(PyObject *))_cffi_exports[21])
#define _cffi_to_c__Bool \
((_Bool(*)(PyObject *))_cffi_exports[22])
#define _cffi_prepare_pointer_call_argument \
((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
PyObject *, char **))_cffi_exports[23])
#define _cffi_convert_array_from_object \
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
#define _CFFI_CPIDX 25
#define _cffi_call_python \
((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
#define _cffi_to_c_wchar3216_t \
((int(*)(PyObject *))_cffi_exports[26])
#define _cffi_from_c_wchar3216_t \
((PyObject *(*)(int))_cffi_exports[27])
#define _CFFI_NUM_EXPORTS 28
struct _cffi_ctypedescr;
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
#define _cffi_type(index) ( \
assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
(struct _cffi_ctypedescr *)_cffi_types[index])
static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
const struct _cffi_type_context_s *ctx)
{
PyObject *module, *o_arg, *new_module;
void *raw[] = {
(void *)module_name,
(void *)version,
(void *)_cffi_exports,
(void *)ctx,
};
module = PyImport_ImportModule("_cffi_backend");
if (module == NULL)
goto failure;
o_arg = PyLong_FromVoidPtr((void *)raw);
if (o_arg == NULL)
goto failure;
new_module = PyObject_CallMethod(
module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
Py_DECREF(o_arg);
Py_DECREF(module);
return new_module;
failure:
Py_XDECREF(module);
return NULL;
}
#ifdef HAVE_WCHAR_H
typedef wchar_t _cffi_wchar_t;
#else
typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
#endif
_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
{
if (sizeof(_cffi_wchar_t) == 2)
return (uint16_t)_cffi_to_c_wchar_t(o);
else
return (uint16_t)_cffi_to_c_wchar3216_t(o);
}
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
{
if (sizeof(_cffi_wchar_t) == 2)
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
else
return _cffi_from_c_wchar3216_t((int)x);
}
_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
{
if (sizeof(_cffi_wchar_t) == 4)
return (int)_cffi_to_c_wchar_t(o);
else
return (int)_cffi_to_c_wchar3216_t(o);
}
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
{
if (sizeof(_cffi_wchar_t) == 4)
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
else
return _cffi_from_c_wchar3216_t((int)x);
}
union _cffi_union_alignment_u {
unsigned char m_char;
unsigned short m_short;
unsigned int m_int;
unsigned long m_long;
unsigned long long m_longlong;
float m_float;
double m_double;
long double m_longdouble;
};
struct _cffi_freeme_s {
struct _cffi_freeme_s *next;
union _cffi_union_alignment_u alignment;
};
_CFFI_UNUSED_FN static int
_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
char **output_data, Py_ssize_t datasize,
struct _cffi_freeme_s **freeme)
{
char *p;
if (datasize < 0)
return -1;
p = *output_data;
if (p == NULL) {
struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
if (fp == NULL)
return -1;
fp->next = *freeme;
*freeme = fp;
p = *output_data = (char *)&fp->alignment;
}
memset((void *)p, 0, (size_t)datasize);
return _cffi_convert_array_from_object(p, ctptr, arg);
}
_CFFI_UNUSED_FN static void
_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
{
do {
void *p = (void *)freeme;
freeme = freeme->next;
PyObject_Free(p);
} while (freeme != NULL);
}
/********** end CPython-specific section **********/
#else
_CFFI_UNUSED_FN
static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
# define _cffi_call_python _cffi_call_python_org
#endif
#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
#define _cffi_prim_int(size, sign) \
((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
(size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
(size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
(size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
_CFFI__UNKNOWN_PRIM)
#define _cffi_prim_float(size) \
((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
(size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
(size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
_CFFI__UNKNOWN_FLOAT_PRIM)
#define _cffi_check_int(got, got_nonpos, expected) \
((got_nonpos) == (expected <= 0) && \
(got) == (unsigned long long)expected)
#ifdef MS_WIN32
# define _cffi_stdcall __stdcall
#else
# define _cffi_stdcall /* nothing */
#endif
#ifdef __cplusplus
}
#endif
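The header above is the glue included by the C sources that cffi generates in API
mode; a build script that produces such a source looks roughly like this (module and
symbol names are invented for the sketch)::

    # build_example.py -- generates and compiles the extension module _example
    from cffi import FFI

    ffibuilder = FFI()
    ffibuilder.cdef("double cos(double x);")
    ffibuilder.set_source(
        "_example",              # name of the generated extension module
        "#include <math.h>",     # real C source compiled together with the glue
        libraries=["m"],         # link against libm (not needed on Windows)
    )

    if __name__ == "__main__":
        ffibuilder.compile(verbose=True)
        # afterwards: from _example import ffi, lib; lib.cos(1.0)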

View File

@ -0,0 +1,527 @@
/***** Support code for embedding *****/
#ifdef __cplusplus
extern "C" {
#endif
#if defined(_WIN32)
# define CFFI_DLLEXPORT __declspec(dllexport)
#elif defined(__GNUC__)
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
#else
# define CFFI_DLLEXPORT /* nothing */
#endif
/* There are two global variables of type _cffi_call_python_fnptr:
* _cffi_call_python, which we declare just below, is the one called
by ``extern "Python"`` implementations.
* _cffi_call_python_org, which on CPython is actually part of the
_cffi_exports[] array, is the function pointer copied from
_cffi_backend.
After initialization is complete, both are equal. However, the
first one remains equal to &_cffi_start_and_call_python until the
very end of initialization, when we are (or should be) sure that
concurrent threads also see a completely initialized world, and
only then is it changed.
*/
#undef _cffi_call_python
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
#ifndef _MSC_VER
/* --- Assuming a GCC not infinitely old --- */
# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
# define cffi_write_barrier() __sync_synchronize()
# if !defined(__amd64__) && !defined(__x86_64__) && \
!defined(__i386__) && !defined(__i386)
# define cffi_read_barrier() __sync_synchronize()
# else
# define cffi_read_barrier() (void)0
# endif
#else
/* --- Windows threads version --- */
# include <Windows.h>
# define cffi_compare_and_swap(l,o,n) \
(InterlockedCompareExchangePointer(l,n,o) == (o))
# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
# define cffi_read_barrier() (void)0
static volatile LONG _cffi_dummy;
#endif
#ifdef WITH_THREAD
# ifndef _MSC_VER
# include <pthread.h>
static pthread_mutex_t _cffi_embed_startup_lock;
# else
static CRITICAL_SECTION _cffi_embed_startup_lock;
# endif
static char _cffi_embed_startup_lock_ready = 0;
#endif
static void _cffi_acquire_reentrant_mutex(void)
{
static void *volatile lock = NULL;
while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
/* should ideally do a spin loop instruction here, but
hard to do it portably and doesn't really matter I
think: pthread_mutex_init() should be very fast, and
this is only run at start-up anyway. */
}
#ifdef WITH_THREAD
if (!_cffi_embed_startup_lock_ready) {
# ifndef _MSC_VER
pthread_mutexattr_t attr;
pthread_mutexattr_init(&attr);
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
# else
InitializeCriticalSection(&_cffi_embed_startup_lock);
# endif
_cffi_embed_startup_lock_ready = 1;
}
#endif
while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
;
#ifndef _MSC_VER
pthread_mutex_lock(&_cffi_embed_startup_lock);
#else
EnterCriticalSection(&_cffi_embed_startup_lock);
#endif
}
static void _cffi_release_reentrant_mutex(void)
{
#ifndef _MSC_VER
pthread_mutex_unlock(&_cffi_embed_startup_lock);
#else
LeaveCriticalSection(&_cffi_embed_startup_lock);
#endif
}
/********** CPython-specific section **********/
#ifndef PYPY_VERSION
#include "_cffi_errors.h"
#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
static void _cffi_py_initialize(void)
{
/* XXX use initsigs=0, which "skips initialization registration of
signal handlers, which might be useful when Python is
embedded" according to the Python docs. But review and think
if it should be a user-controllable setting.
XXX we should also give a way to write errors to a buffer
instead of to stderr.
XXX if importing 'site' fails, CPython (any version) calls
exit(). Should we try to work around this behavior here?
*/
Py_InitializeEx(0);
}
static int _cffi_initialize_python(void)
{
/* This initializes Python, imports _cffi_backend, and then the
present .dll/.so is set up as a CPython C extension module.
*/
int result;
PyGILState_STATE state;
PyObject *pycode=NULL, *global_dict=NULL, *x;
PyObject *builtins;
state = PyGILState_Ensure();
/* Call the initxxx() function from the present module. It will
create and initialize us as a CPython extension module, instead
of letting the startup Python code do it---it might reimport
the same .dll/.so and get maybe confused on some platforms.
It might also have troubles locating the .dll/.so again for all
I know.
*/
(void)_CFFI_PYTHON_STARTUP_FUNC();
if (PyErr_Occurred())
goto error;
/* Now run the Python code provided to ffi.embedding_init_code().
*/
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
"<init code for '" _CFFI_MODULE_NAME "'>",
Py_file_input);
if (pycode == NULL)
goto error;
global_dict = PyDict_New();
if (global_dict == NULL)
goto error;
builtins = PyEval_GetBuiltins();
if (builtins == NULL)
goto error;
if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
goto error;
x = PyEval_EvalCode(
#if PY_MAJOR_VERSION < 3
(PyCodeObject *)
#endif
pycode, global_dict, global_dict);
if (x == NULL)
goto error;
Py_DECREF(x);
/* Done! Now if we've been called from
_cffi_start_and_call_python() in an ``extern "Python"``, we can
only hope that the Python code did correctly set up the
corresponding @ffi.def_extern() function. Otherwise, the
general logic of ``extern "Python"`` functions (inside the
_cffi_backend module) will find that the reference is still
missing and print an error.
*/
result = 0;
done:
Py_XDECREF(pycode);
Py_XDECREF(global_dict);
PyGILState_Release(state);
return result;
error:;
{
/* Print as much information as potentially useful.
Debugging load-time failures with embedding is not fun
*/
PyObject *ecap;
PyObject *exception, *v, *tb, *f, *modules, *mod;
PyErr_Fetch(&exception, &v, &tb);
ecap = _cffi_start_error_capture();
f = PySys_GetObject((char *)"stderr");
if (f != NULL && f != Py_None) {
PyFile_WriteString(
"Failed to initialize the Python-CFFI embedding logic:\n\n", f);
}
if (exception != NULL) {
PyErr_NormalizeException(&exception, &v, &tb);
PyErr_Display(exception, v, tb);
}
Py_XDECREF(exception);
Py_XDECREF(v);
Py_XDECREF(tb);
if (f != NULL && f != Py_None) {
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
"\ncompiled with cffi version: 1.14.5"
"\n_cffi_backend module: ", f);
modules = PyImport_GetModuleDict();
mod = PyDict_GetItemString(modules, "_cffi_backend");
if (mod == NULL) {
PyFile_WriteString("not loaded", f);
}
else {
v = PyObject_GetAttrString(mod, "__file__");
PyFile_WriteObject(v, f, 0);
Py_XDECREF(v);
}
PyFile_WriteString("\nsys.path: ", f);
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
PyFile_WriteString("\n\n", f);
}
_cffi_stop_error_capture(ecap);
}
result = -1;
goto done;
}
#if PY_VERSION_HEX < 0x03080000
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
#endif
static int _cffi_carefully_make_gil(void)
{
/* This does the basic initialization of Python. It can be called
completely concurrently from unrelated threads. It assumes
that we don't hold the GIL before (if it exists), and we don't
hold it afterwards.
(What it really does used to be completely different in Python 2
and Python 3, with the Python 2 solution avoiding the spin-lock
around the Py_InitializeEx() call. However, after recent changes
to CPython 2.7 (issue #358) it no longer works. So we use the
Python 3 solution everywhere.)
This initializes Python by calling Py_InitializeEx().
Important: this must not be called concurrently at all.
So we use a global variable as a simple spin lock. This global
variable must be from 'libpythonX.Y.so', not from this
cffi-based extension module, because it must be shared from
different cffi-based extension modules.
In Python < 3.8, we choose
_PyParser_TokenNames[0] as a completely arbitrary pointer value
that is never written to. The default is to point to the
string "ENDMARKER". We change it temporarily to point to the
next character in that string. (Yes, I know it's REALLY
obscure.)
In Python >= 3.8, this string array is no longer writable, so
instead we pick PyCapsuleType.tp_version_tag. We can't change
Python < 3.8 because someone might use a mixture of cffi
embedded modules, some of which were compiled before this file
changed.
*/
#ifdef WITH_THREAD
# if PY_VERSION_HEX < 0x03080000
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
char *old_value, *locked_value;
while (1) { /* spin loop */
old_value = *lock;
locked_value = old_value + 1;
if (old_value[0] == 'E') {
assert(old_value[1] == 'N');
if (cffi_compare_and_swap(lock, old_value, locked_value))
break;
}
else {
assert(old_value[0] == 'N');
/* should ideally do a spin loop instruction here, but
hard to do it portably and doesn't really matter I
think: PyEval_InitThreads() should be very fast, and
this is only run at start-up anyway. */
}
}
# else
int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
int old_value, locked_value;
assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
while (1) { /* spin loop */
old_value = *lock;
locked_value = -42;
if (old_value == 0) {
if (cffi_compare_and_swap(lock, old_value, locked_value))
break;
}
else {
assert(old_value == locked_value);
/* should ideally do a spin loop instruction here, but
hard to do it portably and doesn't really matter I
think: PyEval_InitThreads() should be very fast, and
this is only run at start-up anyway. */
}
}
# endif
#endif
/* call Py_InitializeEx() */
if (!Py_IsInitialized()) {
_cffi_py_initialize();
#if PY_VERSION_HEX < 0x03070000
PyEval_InitThreads();
#endif
PyEval_SaveThread(); /* release the GIL */
/* the returned tstate must be the one that has been stored into the
autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
}
else {
#if PY_VERSION_HEX < 0x03070000
/* PyEval_InitThreads() is always a no-op from CPython 3.7 */
PyGILState_STATE state = PyGILState_Ensure();
PyEval_InitThreads();
PyGILState_Release(state);
#endif
}
#ifdef WITH_THREAD
/* release the lock */
while (!cffi_compare_and_swap(lock, locked_value, old_value))
;
#endif
return 0;
}
/********** end CPython-specific section **********/
#else
/********** PyPy-specific section **********/
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
static struct _cffi_pypy_init_s {
const char *name;
void *func; /* function pointer */
const char *code;
} _cffi_pypy_init = {
_CFFI_MODULE_NAME,
_CFFI_PYTHON_STARTUP_FUNC,
_CFFI_PYTHON_STARTUP_CODE,
};
extern int pypy_carefully_make_gil(const char *);
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
static int _cffi_carefully_make_gil(void)
{
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
}
static int _cffi_initialize_python(void)
{
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
}
/********** end PyPy-specific section **********/
#endif
#ifdef __GNUC__
__attribute__((noinline))
#endif
static _cffi_call_python_fnptr _cffi_start_python(void)
{
/* Delicate logic to initialize Python. This function can be
called multiple times concurrently, e.g. when the process calls
its first ``extern "Python"`` functions in multiple threads at
once. It can also be called recursively, in which case we must
ignore it. We also have to consider what occurs if several
different cffi-based extensions reach this code in parallel
threads---it is a different copy of the code, then, and we
can't have any shared global variable unless it comes from
'libpythonX.Y.so'.
Idea:
* _cffi_carefully_make_gil(): "carefully" call
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
* then we use a (local) custom lock to make sure that a call to this
cffi-based extension will wait if another call to the *same*
extension is running the initialization in another thread.
It is reentrant, so that a recursive call will not block, but
only one from a different thread.
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
At this point, concurrent calls to Py_InitializeEx() are not
possible: we have the GIL.
* do the rest of the specific initialization, which may
temporarily release the GIL but not the custom lock.
Only release the custom lock when we are done.
*/
static char called = 0;
if (_cffi_carefully_make_gil() != 0)
return NULL;
_cffi_acquire_reentrant_mutex();
/* Here the GIL exists, but we don't have it. We're only protected
from concurrency by the reentrant mutex. */
/* This file only initializes the embedded module once, the first
time this is called, even if there are subinterpreters. */
if (!called) {
called = 1; /* invoke _cffi_initialize_python() only once,
but don't set '_cffi_call_python' right now,
otherwise concurrent threads won't call
this function at all (we need them to wait) */
if (_cffi_initialize_python() == 0) {
/* now initialization is finished. Switch to the fast-path. */
/* We would like nobody to see the new value of
'_cffi_call_python' without also seeing the rest of the
data initialized. However, this is not possible. But
the new value of '_cffi_call_python' is the function
'cffi_call_python()' from _cffi_backend. So: */
cffi_write_barrier();
/* ^^^ we put a write barrier here, and a corresponding
read barrier at the start of cffi_call_python(). This
ensures that after that read barrier, we see everything
done here before the write barrier.
*/
assert(_cffi_call_python_org != NULL);
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
}
else {
/* initialization failed. Reset this to NULL, even if it was
already set to some other value. Future calls to
_cffi_start_python() are still forced to occur, and will
always return NULL from now on. */
_cffi_call_python_org = NULL;
}
}
_cffi_release_reentrant_mutex();
return (_cffi_call_python_fnptr)_cffi_call_python_org;
}
static
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
{
_cffi_call_python_fnptr fnptr;
int current_err = errno;
#ifdef _MSC_VER
int current_lasterr = GetLastError();
#endif
fnptr = _cffi_start_python();
if (fnptr == NULL) {
fprintf(stderr, "function %s() called, but initialization code "
"failed. Returning 0.\n", externpy->name);
memset(args, 0, externpy->size_of_result);
}
#ifdef _MSC_VER
SetLastError(current_lasterr);
#endif
errno = current_err;
if (fnptr != NULL)
fnptr(externpy, args);
}
/* The cffi_start_python() function makes sure Python is initialized
and our cffi module is set up. It can be called manually from the
user C code. The same effect is obtained automatically from any
dll-exported ``extern "Python"`` function. This function returns
-1 if initialization failed, 0 if all is OK. */
_CFFI_UNUSED_FN
static int cffi_start_python(void)
{
if (_cffi_call_python == &_cffi_start_and_call_python) {
if (_cffi_start_python() == NULL)
return -1;
}
cffi_read_barrier();
return 0;
}
#undef cffi_compare_and_swap
#undef cffi_write_barrier
#undef cffi_read_barrier
#ifdef __cplusplus
}
#endif
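The C glue above is the bootstrap half of cffi's embedding mode: it is emitted for modules built with ffi.embedding_api() and ffi.embedding_init_code() (both defined in cffi/api.py below), and _cffi_start_and_call_python() runs the first time a dll-exported ``extern "Python"`` function is entered. A minimal Python-side build sketch, assuming a hypothetical module name 'my_plugin' and function 'do_stuff':

# Build-script sketch (hypothetical names: 'my_plugin', 'do_stuff').
import cffi

ffi = cffi.FFI()
ffi.embedding_api("""
    int do_stuff(int, int);          /* becomes a dll-exported symbol */
""")
ffi.set_source("my_plugin", "")
ffi.embedding_init_code("""
    from my_plugin import ffi

    @ffi.def_extern()
    def do_stuff(x, y):
        # runs lazily: the first C-level call goes through
        # _cffi_start_and_call_python() shown above
        return x + y
""")
ffi.compile(target="plugin.*", verbose=True)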

View File

@@ -0,0 +1,965 @@
import sys, types
from .lock import allocate_lock
from .error import CDefError
from . import model
try:
callable
except NameError:
# Python 3.1
from collections import Callable
callable = lambda x: isinstance(x, Callable)
try:
basestring
except NameError:
# Python 3.x
basestring = str
_unspecified = object()
class FFI(object):
r'''
The main top-level class that you instantiate once, or once per module.
Example usage:
ffi = FFI()
ffi.cdef("""
int printf(const char *, ...);
""")
C = ffi.dlopen(None) # standard library
-or-
C = ffi.verify() # use a C compiler: verify the decl above is right
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
'''
def __init__(self, backend=None):
"""Create an FFI instance. The 'backend' argument is used to
select a non-default backend, mostly for tests.
"""
if backend is None:
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
# _cffi_backend.so compiled.
import _cffi_backend as backend
from . import __version__
if backend.__version__ != __version__:
# bad version! Try to be as explicit as possible.
if hasattr(backend, '__file__'):
# CPython
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
__version__, __file__,
backend.__version__, backend.__file__))
else:
# PyPy
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
__version__, __file__, backend.__version__))
# (If you insist you can also try to pass the option
# 'backend=backend_ctypes.CTypesBackend()', but don't
# rely on it! It's probably not going to work well.)
from . import cparser
self._backend = backend
self._lock = allocate_lock()
self._parser = cparser.Parser()
self._cached_btypes = {}
self._parsed_types = types.ModuleType('parsed_types').__dict__
self._new_types = types.ModuleType('new_types').__dict__
self._function_caches = []
self._libraries = []
self._cdefsources = []
self._included_ffis = []
self._windows_unicode = None
self._init_once_cache = {}
self._cdef_version = None
self._embedding = None
self._typecache = model.get_typecache(backend)
if hasattr(backend, 'set_ffi'):
backend.set_ffi(self)
for name in list(backend.__dict__):
if name.startswith('RTLD_'):
setattr(self, name, getattr(backend, name))
#
with self._lock:
self.BVoidP = self._get_cached_btype(model.voidp_type)
self.BCharA = self._get_cached_btype(model.char_array_type)
if isinstance(backend, types.ModuleType):
# _cffi_backend: attach these constants to the class
if not hasattr(FFI, 'NULL'):
FFI.NULL = self.cast(self.BVoidP, 0)
FFI.CData, FFI.CType = backend._get_types()
else:
# ctypes backend: attach these constants to the instance
self.NULL = self.cast(self.BVoidP, 0)
self.CData, self.CType = backend._get_types()
self.buffer = backend.buffer
def cdef(self, csource, override=False, packed=False, pack=None):
"""Parse the given C source. This registers all declared functions,
types, and global variables. The functions and global variables can
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
The types can be used in 'ffi.new()' and other functions.
If 'packed' is specified as True, all structs declared inside this
cdef are packed, i.e. laid out without any field alignment at all.
Alternatively, 'pack' can be a small integer, and requests for
alignment greater than that are ignored (pack=1 is equivalent to
packed=True).
"""
self._cdef(csource, override=override, packed=packed, pack=pack)
def embedding_api(self, csource, packed=False, pack=None):
self._cdef(csource, packed=packed, pack=pack, dllexport=True)
if self._embedding is None:
self._embedding = ''
def _cdef(self, csource, override=False, **options):
if not isinstance(csource, str): # unicode, on Python 2
if not isinstance(csource, basestring):
raise TypeError("cdef() argument must be a string")
csource = csource.encode('ascii')
with self._lock:
self._cdef_version = object()
self._parser.parse(csource, override=override, **options)
self._cdefsources.append(csource)
if override:
for cache in self._function_caches:
cache.clear()
finishlist = self._parser._recomplete
if finishlist:
self._parser._recomplete = []
for tp in finishlist:
tp.finish_backend_type(self, finishlist)
def dlopen(self, name, flags=0):
"""Load and return a dynamic library identified by 'name'.
The standard C library can be loaded by passing None.
Note that functions and types declared by 'ffi.cdef()' are not
linked to a particular library, just like C headers; in the
library we only look for the actual (untyped) symbols.
"""
if not (isinstance(name, basestring) or
name is None or
isinstance(name, self.CData)):
raise TypeError("dlopen(name): name must be a file name, None, "
"or an already-opened 'void *' handle")
with self._lock:
lib, function_cache = _make_ffi_library(self, name, flags)
self._function_caches.append(function_cache)
self._libraries.append(lib)
return lib
def dlclose(self, lib):
"""Close a library obtained with ffi.dlopen(). After this call,
access to functions or variables from the library will fail
(possibly with a segmentation fault).
"""
type(lib).__cffi_close__(lib)
def _typeof_locked(self, cdecl):
# call me with the lock!
key = cdecl
if key in self._parsed_types:
return self._parsed_types[key]
#
if not isinstance(cdecl, str): # unicode, on Python 2
cdecl = cdecl.encode('ascii')
#
type = self._parser.parse_type(cdecl)
really_a_function_type = type.is_raw_function
if really_a_function_type:
type = type.as_function_pointer()
btype = self._get_cached_btype(type)
result = btype, really_a_function_type
self._parsed_types[key] = result
return result
def _typeof(self, cdecl, consider_function_as_funcptr=False):
# string -> ctype object
try:
result = self._parsed_types[cdecl]
except KeyError:
with self._lock:
result = self._typeof_locked(cdecl)
#
btype, really_a_function_type = result
if really_a_function_type and not consider_function_as_funcptr:
raise CDefError("the type %r is a function type, not a "
"pointer-to-function type" % (cdecl,))
return btype
def typeof(self, cdecl):
"""Parse the C type given as a string and return the
corresponding <ctype> object.
It can also be used on 'cdata' instance to get its C type.
"""
if isinstance(cdecl, basestring):
return self._typeof(cdecl)
if isinstance(cdecl, self.CData):
return self._backend.typeof(cdecl)
if isinstance(cdecl, types.BuiltinFunctionType):
res = _builtin_function_type(cdecl)
if res is not None:
return res
if (isinstance(cdecl, types.FunctionType)
and hasattr(cdecl, '_cffi_base_type')):
with self._lock:
return self._get_cached_btype(cdecl._cffi_base_type)
raise TypeError(type(cdecl))
def sizeof(self, cdecl):
"""Return the size in bytes of the argument. It can be a
string naming a C type, or a 'cdata' instance.
"""
if isinstance(cdecl, basestring):
BType = self._typeof(cdecl)
return self._backend.sizeof(BType)
else:
return self._backend.sizeof(cdecl)
def alignof(self, cdecl):
"""Return the natural alignment size in bytes of the C type
given as a string.
"""
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return self._backend.alignof(cdecl)
def offsetof(self, cdecl, *fields_or_indexes):
"""Return the offset of the named field inside the given
structure or array, which must be given as a C type name.
You can give several field names in case of nested structures.
You can also give numeric values which correspond to array
items, in case of an array type.
"""
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
def new(self, cdecl, init=None):
"""Allocate an instance according to the specified C type and
return a pointer to it. The specified C type must be either a
pointer or an array: ``new('X *')`` allocates an X and returns
a pointer to it, whereas ``new('X[n]')`` allocates an array of
n X'es and returns an array referencing it (which works
mostly like a pointer, like in C). You can also use
``new('X[]', n)`` to allocate an array of a non-constant
length n.
The memory is initialized following the rules of declaring a
global variable in C: by default it is zero-initialized, but
an explicit initializer can be given which can be used to
fill all or part of the memory.
When the returned <cdata> object goes out of scope, the memory
is freed. In other words the returned <cdata> object has
ownership of the value of type 'cdecl' that it points to. This
means that the raw data can be used as long as this object is
kept alive, but must not be used for a longer time. Be careful
about that when copying the pointer to the memory somewhere
else, e.g. into another structure.
"""
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return self._backend.newp(cdecl, init)
def new_allocator(self, alloc=None, free=None,
should_clear_after_alloc=True):
"""Return a new allocator, i.e. a function that behaves like ffi.new()
but uses the provided low-level 'alloc' and 'free' functions.
'alloc' is called with the size as argument. If it returns NULL, a
MemoryError is raised. 'free' is called with the result of 'alloc'
as argument. Both can be either Python function or directly C
functions. If 'free' is None, then no free function is called.
If both 'alloc' and 'free' are None, the default is used.
If 'should_clear_after_alloc' is set to False, then the memory
returned by 'alloc' is assumed to be already cleared (or you are
fine with garbage); otherwise CFFI will clear it.
"""
compiled_ffi = self._backend.FFI()
allocator = compiled_ffi.new_allocator(alloc, free,
should_clear_after_alloc)
def allocate(cdecl, init=None):
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return allocator(cdecl, init)
return allocate
def cast(self, cdecl, source):
"""Similar to a C cast: returns an instance of the named C
type initialized with the given 'source'. The source is
cast between integers or pointers of any type.
"""
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return self._backend.cast(cdecl, source)
def string(self, cdata, maxlen=-1):
"""Return a Python string (or unicode string) from the 'cdata'.
If 'cdata' is a pointer or array of characters or bytes, returns
the null-terminated string. The returned string extends until
the first null character, or at most 'maxlen' characters. If
'cdata' is an array then 'maxlen' defaults to its length.
If 'cdata' is a pointer or array of wchar_t, returns a unicode
string following the same rules.
If 'cdata' is a single character or byte or a wchar_t, returns
it as a string or unicode string.
If 'cdata' is an enum, returns the value of the enumerator as a
string, or 'NUMBER' if the value is out of range.
"""
return self._backend.string(cdata, maxlen)
def unpack(self, cdata, length):
"""Unpack an array of C data of the given length,
returning a Python string/unicode/list.
If 'cdata' is a pointer to 'char', returns a byte string.
It does not stop at the first null. This is equivalent to:
ffi.buffer(cdata, length)[:]
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
'length' is measured in wchar_t's; it is not the size in bytes.
If 'cdata' is a pointer to anything else, returns a list of
'length' items. This is a faster equivalent to:
[cdata[i] for i in range(length)]
"""
return self._backend.unpack(cdata, length)
#def buffer(self, cdata, size=-1):
# """Return a read-write buffer object that references the raw C data
# pointed to by the given 'cdata'. The 'cdata' must be a pointer or
# an array. Can be passed to functions expecting a buffer, or directly
# manipulated with:
#
# buf[:] get a copy of it in a regular string, or
# buf[idx] as a single character
# buf[:] = ...
# buf[idx] = ... change the content
# """
# note that 'buffer' is a type, set on this instance by __init__
def from_buffer(self, cdecl, python_buffer=_unspecified,
require_writable=False):
"""Return a cdata of the given type pointing to the data of the
given Python object, which must support the buffer interface.
Note that this is not meant to be used on the built-in types
str or unicode (you can build 'char[]' arrays explicitly)
but only on objects containing large quantities of raw data
in some other format, like 'array.array' or numpy arrays.
The first argument is optional and defaults to 'char[]'.
"""
if python_buffer is _unspecified:
cdecl, python_buffer = self.BCharA, cdecl
elif isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
return self._backend.from_buffer(cdecl, python_buffer,
require_writable)
def memmove(self, dest, src, n):
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
Like the C function memmove(), the memory areas may overlap;
apart from that it behaves like the C function memcpy().
'src' can be any cdata ptr or array, or any Python buffer object.
'dest' can be any cdata ptr or array, or a writable Python buffer
object. The size to copy, 'n', is always measured in bytes.
Unlike other methods, this one supports all Python buffers, including
byte strings and bytearrays---but it still does not support
non-contiguous buffers.
"""
return self._backend.memmove(dest, src, n)
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
"""Return a callback object or a decorator making such a
callback object. 'cdecl' must name a C function pointer type.
The callback invokes the specified 'python_callable' (which may
be provided either directly or via a decorator). Important: the
callback object must be manually kept alive for as long as the
callback may be invoked from the C level.
"""
def callback_decorator_wrap(python_callable):
if not callable(python_callable):
raise TypeError("the 'python_callable' argument "
"is not callable")
return self._backend.callback(cdecl, python_callable,
error, onerror)
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
if python_callable is None:
return callback_decorator_wrap # decorator mode
else:
return callback_decorator_wrap(python_callable) # direct mode
def getctype(self, cdecl, replace_with=''):
"""Return a string giving the C type 'cdecl', which may be itself
a string or a <ctype> object. If 'replace_with' is given, it gives
extra text to append (or insert for more complicated C types), like
a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
"""
if isinstance(cdecl, basestring):
cdecl = self._typeof(cdecl)
replace_with = replace_with.strip()
if (replace_with.startswith('*')
and '&[' in self._backend.getcname(cdecl, '&')):
replace_with = '(%s)' % replace_with
elif replace_with and not replace_with[0] in '[(':
replace_with = ' ' + replace_with
return self._backend.getcname(cdecl, replace_with)
def gc(self, cdata, destructor, size=0):
"""Return a new cdata object that points to the same
data. Later, when this new cdata object is garbage-collected,
'destructor(old_cdata_object)' will be called.
The optional 'size' gives an estimate of the size, used to
trigger the garbage collection more eagerly. So far only used
on PyPy. It tells the GC that the returned object keeps alive
roughly 'size' bytes of external memory.
"""
return self._backend.gcp(cdata, destructor, size)
def _get_cached_btype(self, type):
assert self._lock.acquire(False) is False
# call me with the lock!
try:
BType = self._cached_btypes[type]
except KeyError:
finishlist = []
BType = type.get_cached_btype(self, finishlist)
for type in finishlist:
type.finish_backend_type(self, finishlist)
return BType
def verify(self, source='', tmpdir=None, **kwargs):
"""Verify that the current ffi signatures compile on this
machine, and return a dynamic library object. The dynamic
library can be used to call functions and access global
variables declared in this 'ffi'. The library is compiled
by the C compiler: it gives you C-level API compatibility
(including calling macros). This is unlike 'ffi.dlopen()',
which requires binary compatibility in the signatures.
"""
from .verifier import Verifier, _caller_dir_pycache
#
# If set_unicode(True) was called, insert the UNICODE and
# _UNICODE macro declarations
if self._windows_unicode:
self._apply_windows_unicode(kwargs)
#
# Set the tmpdir here, and not in Verifier.__init__: it picks
# up the caller's directory, which we want to be the caller of
# ffi.verify(), as opposed to the caller of Verifier().
tmpdir = tmpdir or _caller_dir_pycache()
#
# Make a Verifier() and use it to load the library.
self.verifier = Verifier(self, source, tmpdir, **kwargs)
lib = self.verifier.load_library()
#
# Save the loaded library for keep-alive purposes, even
# if the caller doesn't keep it alive itself (it should).
self._libraries.append(lib)
return lib
def _get_errno(self):
return self._backend.get_errno()
def _set_errno(self, errno):
self._backend.set_errno(errno)
errno = property(_get_errno, _set_errno, None,
"the value of 'errno' from/to the C calls")
def getwinerror(self, code=-1):
return self._backend.getwinerror(code)
def _pointer_to(self, ctype):
with self._lock:
return model.pointer_cache(self, ctype)
def addressof(self, cdata, *fields_or_indexes):
"""Return the address of a <cdata 'struct-or-union'>.
If 'fields_or_indexes' are given, returns the address of that
field or array item in the structure or array, recursively in
case of nested structures.
"""
try:
ctype = self._backend.typeof(cdata)
except TypeError:
if '__addressof__' in type(cdata).__dict__:
return type(cdata).__addressof__(cdata, *fields_or_indexes)
raise
if fields_or_indexes:
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
else:
if ctype.kind == "pointer":
raise TypeError("addressof(pointer)")
offset = 0
ctypeptr = self._pointer_to(ctype)
return self._backend.rawaddressof(ctypeptr, cdata, offset)
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
for field1 in fields_or_indexes:
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
offset += offset1
return ctype, offset
def include(self, ffi_to_include):
"""Includes the typedefs, structs, unions and enums defined
in another FFI instance. Usage is similar to a #include in C,
where a part of the program might include types defined in
another part for its own usage. Note that the include()
method has no effect on functions, constants and global
variables, which must anyway be accessed directly from the
lib object returned by the original FFI instance.
"""
if not isinstance(ffi_to_include, FFI):
raise TypeError("ffi.include() expects an argument that is also of"
" type cffi.FFI, not %r" % (
type(ffi_to_include).__name__,))
if ffi_to_include is self:
raise ValueError("self.include(self)")
with ffi_to_include._lock:
with self._lock:
self._parser.include(ffi_to_include._parser)
self._cdefsources.append('[')
self._cdefsources.extend(ffi_to_include._cdefsources)
self._cdefsources.append(']')
self._included_ffis.append(ffi_to_include)
def new_handle(self, x):
return self._backend.newp_handle(self.BVoidP, x)
def from_handle(self, x):
return self._backend.from_handle(x)
def release(self, x):
self._backend.release(x)
def set_unicode(self, enabled_flag):
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
_UNICODE defines in C, and declare the types like TCHAR and LPCTSTR
to be (pointers to) wchar_t. If 'enabled_flag' is False,
declare these types to be (pointers to) plain 8-bit characters.
This is mostly for backward compatibility; you usually want True.
"""
if self._windows_unicode is not None:
raise ValueError("set_unicode() can only be called once")
enabled_flag = bool(enabled_flag)
if enabled_flag:
self.cdef("typedef wchar_t TBYTE;"
"typedef wchar_t TCHAR;"
"typedef const wchar_t *LPCTSTR;"
"typedef const wchar_t *PCTSTR;"
"typedef wchar_t *LPTSTR;"
"typedef wchar_t *PTSTR;"
"typedef TBYTE *PTBYTE;"
"typedef TCHAR *PTCHAR;")
else:
self.cdef("typedef char TBYTE;"
"typedef char TCHAR;"
"typedef const char *LPCTSTR;"
"typedef const char *PCTSTR;"
"typedef char *LPTSTR;"
"typedef char *PTSTR;"
"typedef TBYTE *PTBYTE;"
"typedef TCHAR *PTCHAR;")
self._windows_unicode = enabled_flag
def _apply_windows_unicode(self, kwds):
defmacros = kwds.get('define_macros', ())
if not isinstance(defmacros, (list, tuple)):
raise TypeError("'define_macros' must be a list or tuple")
defmacros = list(defmacros) + [('UNICODE', '1'),
('_UNICODE', '1')]
kwds['define_macros'] = defmacros
def _apply_embedding_fix(self, kwds):
# must include an argument like "-lpython2.7" for the compiler
def ensure(key, value):
lst = kwds.setdefault(key, [])
if value not in lst:
lst.append(value)
#
if '__pypy__' in sys.builtin_module_names:
import os
if sys.platform == "win32":
# we need 'libpypy-c.lib'. Current distributions of
# pypy (>= 4.1) contain it as 'libs/python27.lib'.
pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
if hasattr(sys, 'prefix'):
ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
else:
# we need 'libpypy-c.{so,dylib}', which should be by
# default located in 'sys.prefix/bin' for installed
# systems.
if sys.version_info < (3,):
pythonlib = "pypy-c"
else:
pythonlib = "pypy3-c"
if hasattr(sys, 'prefix'):
ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
# On uninstalled pypy's, the libpypy-c is typically found in
# .../pypy/goal/.
if hasattr(sys, 'prefix'):
ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
else:
if sys.platform == "win32":
template = "python%d%d"
if hasattr(sys, 'gettotalrefcount'):
template += '_d'
else:
try:
import sysconfig
except ImportError: # 2.6
from distutils import sysconfig
template = "python%d.%d"
if sysconfig.get_config_var('DEBUG_EXT'):
template += sysconfig.get_config_var('DEBUG_EXT')
pythonlib = (template %
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
if hasattr(sys, 'abiflags'):
pythonlib += sys.abiflags
ensure('libraries', pythonlib)
if sys.platform == "win32":
ensure('extra_link_args', '/MANIFEST')
def set_source(self, module_name, source, source_extension='.c', **kwds):
import os
if hasattr(self, '_assigned_source'):
raise ValueError("set_source() cannot be called several times "
"per ffi object")
if not isinstance(module_name, basestring):
raise TypeError("'module_name' must be a string")
if os.sep in module_name or (os.altsep and os.altsep in module_name):
raise ValueError("'module_name' must not contain '/': use a dotted "
"name to make a 'package.module' location")
self._assigned_source = (str(module_name), source,
source_extension, kwds)
def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
source_extension='.c', **kwds):
from . import pkgconfig
if not isinstance(pkgconfig_libs, list):
raise TypeError("the pkgconfig_libs argument must be a list "
"of package names")
kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
pkgconfig.merge_flags(kwds, kwds2)
self.set_source(module_name, source, source_extension, **kwds)
def distutils_extension(self, tmpdir='build', verbose=True):
from distutils.dir_util import mkpath
from .recompiler import recompile
#
if not hasattr(self, '_assigned_source'):
if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
return self.verifier.get_extension()
raise ValueError("set_source() must be called before"
" distutils_extension()")
module_name, source, source_extension, kwds = self._assigned_source
if source is None:
raise TypeError("distutils_extension() is only for C extension "
"modules, not for dlopen()-style pure Python "
"modules")
mkpath(tmpdir)
ext, updated = recompile(self, module_name,
source, tmpdir=tmpdir, extradir=tmpdir,
source_extension=source_extension,
call_c_compiler=False, **kwds)
if verbose:
if updated:
sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
else:
sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
return ext
def emit_c_code(self, filename):
from .recompiler import recompile
#
if not hasattr(self, '_assigned_source'):
raise ValueError("set_source() must be called before emit_c_code()")
module_name, source, source_extension, kwds = self._assigned_source
if source is None:
raise TypeError("emit_c_code() is only for C extension modules, "
"not for dlopen()-style pure Python modules")
recompile(self, module_name, source,
c_file=filename, call_c_compiler=False, **kwds)
def emit_python_code(self, filename):
from .recompiler import recompile
#
if not hasattr(self, '_assigned_source'):
raise ValueError("set_source() must be called before emit_c_code()")
module_name, source, source_extension, kwds = self._assigned_source
if source is not None:
raise TypeError("emit_python_code() is only for dlopen()-style "
"pure Python modules, not for C extension modules")
recompile(self, module_name, source,
c_file=filename, call_c_compiler=False, **kwds)
def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
"""The 'target' argument gives the final file name of the
compiled DLL. Use '*' to force distutils' choice, suitable for
regular CPython C API modules. Use a file name ending in '.*'
to ask for the system's default extension for dynamic libraries
(.so/.dll/.dylib).
The default is '*' when building a non-embedded C API extension,
and (module_name + '.*') when building an embedded library.
"""
from .recompiler import recompile
#
if not hasattr(self, '_assigned_source'):
raise ValueError("set_source() must be called before compile()")
module_name, source, source_extension, kwds = self._assigned_source
return recompile(self, module_name, source, tmpdir=tmpdir,
target=target, source_extension=source_extension,
compiler_verbose=verbose, debug=debug, **kwds)
def init_once(self, func, tag):
# Read _init_once_cache[tag], which is either (False, lock) if
# we're calling the function now in some thread, or (True, result).
# Don't call setdefault() in most cases, to avoid allocating and
# immediately freeing a lock; but still use setdefault() to avoid
# races.
try:
x = self._init_once_cache[tag]
except KeyError:
x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
# Common case: we got (True, result), so we return the result.
if x[0]:
return x[1]
# Else, it's a lock. Acquire it to serialize the following tests.
with x[1]:
# Read again from _init_once_cache the current status.
x = self._init_once_cache[tag]
if x[0]:
return x[1]
# Call the function and store the result back.
result = func()
self._init_once_cache[tag] = (True, result)
return result
def embedding_init_code(self, pysource):
if self._embedding:
raise ValueError("embedding_init_code() can only be called once")
# fix 'pysource' before it gets dumped into the C file:
# - remove empty lines at the beginning, so it starts at "line 1"
# - dedent, if all non-empty lines are indented
# - check for SyntaxErrors
import re
match = re.match(r'\s*\n', pysource)
if match:
pysource = pysource[match.end():]
lines = pysource.splitlines() or ['']
prefix = re.match(r'\s*', lines[0]).group()
for i in range(1, len(lines)):
line = lines[i]
if line.rstrip():
while not line.startswith(prefix):
prefix = prefix[:-1]
i = len(prefix)
lines = [line[i:]+'\n' for line in lines]
pysource = ''.join(lines)
#
compile(pysource, "cffi_init", "exec")
#
self._embedding = pysource
def def_extern(self, *args, **kwds):
raise ValueError("ffi.def_extern() is only available on API-mode FFI "
"objects")
def list_types(self):
"""Returns the user type names known to this FFI instance.
This returns a tuple containing three lists of names:
(typedef_names, names_of_structs, names_of_unions)
"""
typedefs = []
structs = []
unions = []
for key in self._parser._declarations:
if key.startswith('typedef '):
typedefs.append(key[8:])
elif key.startswith('struct '):
structs.append(key[7:])
elif key.startswith('union '):
unions.append(key[6:])
typedefs.sort()
structs.sort()
unions.sort()
return (typedefs, structs, unions)
def _load_backend_lib(backend, name, flags):
import os
if not isinstance(name, basestring):
if sys.platform != "win32" or name is not None:
return backend.load_library(name, flags)
name = "c" # Windows: load_library(None) fails, but this works
# on Python 2 (backward compatibility hack only)
first_error = None
if '.' in name or '/' in name or os.sep in name:
try:
return backend.load_library(name, flags)
except OSError as e:
first_error = e
import ctypes.util
path = ctypes.util.find_library(name)
if path is None:
if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
raise OSError("dlopen(None) cannot work on Windows for Python 3 "
"(see http://bugs.python.org/issue23606)")
msg = ("ctypes.util.find_library() did not manage "
"to locate a library called %r" % (name,))
if first_error is not None:
msg = "%s. Additionally, %s" % (first_error, msg)
raise OSError(msg)
return backend.load_library(path, flags)
def _make_ffi_library(ffi, libname, flags):
backend = ffi._backend
backendlib = _load_backend_lib(backend, libname, flags)
#
def accessor_function(name):
key = 'function ' + name
tp, _ = ffi._parser._declarations[key]
BType = ffi._get_cached_btype(tp)
value = backendlib.load_function(BType, name)
library.__dict__[name] = value
#
def accessor_variable(name):
key = 'variable ' + name
tp, _ = ffi._parser._declarations[key]
BType = ffi._get_cached_btype(tp)
read_variable = backendlib.read_variable
write_variable = backendlib.write_variable
setattr(FFILibrary, name, property(
lambda self: read_variable(BType, name),
lambda self, value: write_variable(BType, name, value)))
#
def addressof_var(name):
try:
return addr_variables[name]
except KeyError:
with ffi._lock:
if name not in addr_variables:
key = 'variable ' + name
tp, _ = ffi._parser._declarations[key]
BType = ffi._get_cached_btype(tp)
if BType.kind != 'array':
BType = model.pointer_cache(ffi, BType)
p = backendlib.load_function(BType, name)
addr_variables[name] = p
return addr_variables[name]
#
def accessor_constant(name):
raise NotImplementedError("non-integer constant '%s' cannot be "
"accessed from a dlopen() library" % (name,))
#
def accessor_int_constant(name):
library.__dict__[name] = ffi._parser._int_constants[name]
#
accessors = {}
accessors_version = [False]
addr_variables = {}
#
def update_accessors():
if accessors_version[0] is ffi._cdef_version:
return
#
for key, (tp, _) in ffi._parser._declarations.items():
if not isinstance(tp, model.EnumType):
tag, name = key.split(' ', 1)
if tag == 'function':
accessors[name] = accessor_function
elif tag == 'variable':
accessors[name] = accessor_variable
elif tag == 'constant':
accessors[name] = accessor_constant
else:
for i, enumname in enumerate(tp.enumerators):
def accessor_enum(name, tp=tp, i=i):
tp.check_not_partial()
library.__dict__[name] = tp.enumvalues[i]
accessors[enumname] = accessor_enum
for name in ffi._parser._int_constants:
accessors.setdefault(name, accessor_int_constant)
accessors_version[0] = ffi._cdef_version
#
def make_accessor(name):
with ffi._lock:
if name in library.__dict__ or name in FFILibrary.__dict__:
return # added by another thread while waiting for the lock
if name not in accessors:
update_accessors()
if name not in accessors:
raise AttributeError(name)
accessors[name](name)
#
class FFILibrary(object):
def __getattr__(self, name):
make_accessor(name)
return getattr(self, name)
def __setattr__(self, name, value):
try:
property = getattr(self.__class__, name)
except AttributeError:
make_accessor(name)
setattr(self, name, value)
else:
property.__set__(self, value)
def __dir__(self):
with ffi._lock:
update_accessors()
return accessors.keys()
def __addressof__(self, name):
if name in library.__dict__:
return library.__dict__[name]
if name in FFILibrary.__dict__:
return addressof_var(name)
make_accessor(name)
if name in library.__dict__:
return library.__dict__[name]
if name in FFILibrary.__dict__:
return addressof_var(name)
raise AttributeError("cffi library has no function or "
"global variable named '%s'" % (name,))
def __cffi_close__(self):
backendlib.close_lib()
self.__dict__.clear()
#
if isinstance(libname, basestring):
try:
if not isinstance(libname, str): # unicode, on Python 2
libname = libname.encode('utf-8')
FFILibrary.__name__ = 'FFILibrary_%s' % libname
except UnicodeError:
pass
library = FFILibrary()
return library, library.__dict__
def _builtin_function_type(func):
# a hack to make at least ffi.typeof(builtin_function) work,
# if the builtin function was obtained by 'vengine_cpy'.
import sys
try:
module = sys.modules[func.__module__]
ffi = module._cffi_original_ffi
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
tp = types_of_builtin_funcs[func]
except (KeyError, AttributeError, TypeError):
return None
else:
with ffi._lock:
return ffi._get_cached_btype(tp)
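Taken together, the docstrings above describe the ABI-mode workflow: declare with cdef(), load with dlopen(), allocate with new(), then call. A short sketch against the standard C library, assuming a non-Windows platform where dlopen(None) works; nothing here goes beyond what the docstrings state:

# ABI-mode sketch: declare, load, allocate, call.
from cffi import FFI

ffi = FFI()
ffi.cdef("""
    int    printf(const char *format, ...);
    size_t strlen(const char *s);
""")
C = ffi.dlopen(None)                     # standard C library (not on Windows/Python 3)

buf = ffi.new("char[]", b"world")        # owning, zero-terminated allocation
C.printf(b"hello, %s!\n", buf)           # variadic argument passed as a cdata
assert C.strlen(buf) == 5

print(ffi.sizeof("long"))                # size of a C type given by name
print(ffi.string(buf))                   # b'world'

# one-shot initialization shared by threads, keyed by tag (see init_once())
settings = ffi.init_once(lambda: {"ready": True}, "demo-init")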

File diff suppressed because it is too large

View File

@@ -0,0 +1,187 @@
from .error import VerificationError
class CffiOp(object):
def __init__(self, op, arg):
self.op = op
self.arg = arg
def as_c_expr(self):
if self.op is None:
assert isinstance(self.arg, str)
return '(_cffi_opcode_t)(%s)' % (self.arg,)
classname = CLASS_NAME[self.op]
return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
def as_python_bytes(self):
if self.op is None and self.arg.isdigit():
value = int(self.arg) # non-negative: '-' not in self.arg
if value >= 2**31:
raise OverflowError("cannot emit %r: limited to 2**31-1"
% (self.arg,))
return format_four_bytes(value)
if isinstance(self.arg, str):
raise VerificationError("cannot emit to Python: %r" % (self.arg,))
return format_four_bytes((self.arg << 8) | self.op)
def __str__(self):
classname = CLASS_NAME.get(self.op, self.op)
return '(%s %s)' % (classname, self.arg)
def format_four_bytes(num):
return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
(num >> 24) & 0xFF,
(num >> 16) & 0xFF,
(num >> 8) & 0xFF,
(num ) & 0xFF)
OP_PRIMITIVE = 1
OP_POINTER = 3
OP_ARRAY = 5
OP_OPEN_ARRAY = 7
OP_STRUCT_UNION = 9
OP_ENUM = 11
OP_FUNCTION = 13
OP_FUNCTION_END = 15
OP_NOOP = 17
OP_BITFIELD = 19
OP_TYPENAME = 21
OP_CPYTHON_BLTN_V = 23 # varargs
OP_CPYTHON_BLTN_N = 25 # noargs
OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
OP_CONSTANT = 29
OP_CONSTANT_INT = 31
OP_GLOBAL_VAR = 33
OP_DLOPEN_FUNC = 35
OP_DLOPEN_CONST = 37
OP_GLOBAL_VAR_F = 39
OP_EXTERN_PYTHON = 41
PRIM_VOID = 0
PRIM_BOOL = 1
PRIM_CHAR = 2
PRIM_SCHAR = 3
PRIM_UCHAR = 4
PRIM_SHORT = 5
PRIM_USHORT = 6
PRIM_INT = 7
PRIM_UINT = 8
PRIM_LONG = 9
PRIM_ULONG = 10
PRIM_LONGLONG = 11
PRIM_ULONGLONG = 12
PRIM_FLOAT = 13
PRIM_DOUBLE = 14
PRIM_LONGDOUBLE = 15
PRIM_WCHAR = 16
PRIM_INT8 = 17
PRIM_UINT8 = 18
PRIM_INT16 = 19
PRIM_UINT16 = 20
PRIM_INT32 = 21
PRIM_UINT32 = 22
PRIM_INT64 = 23
PRIM_UINT64 = 24
PRIM_INTPTR = 25
PRIM_UINTPTR = 26
PRIM_PTRDIFF = 27
PRIM_SIZE = 28
PRIM_SSIZE = 29
PRIM_INT_LEAST8 = 30
PRIM_UINT_LEAST8 = 31
PRIM_INT_LEAST16 = 32
PRIM_UINT_LEAST16 = 33
PRIM_INT_LEAST32 = 34
PRIM_UINT_LEAST32 = 35
PRIM_INT_LEAST64 = 36
PRIM_UINT_LEAST64 = 37
PRIM_INT_FAST8 = 38
PRIM_UINT_FAST8 = 39
PRIM_INT_FAST16 = 40
PRIM_UINT_FAST16 = 41
PRIM_INT_FAST32 = 42
PRIM_UINT_FAST32 = 43
PRIM_INT_FAST64 = 44
PRIM_UINT_FAST64 = 45
PRIM_INTMAX = 46
PRIM_UINTMAX = 47
PRIM_FLOATCOMPLEX = 48
PRIM_DOUBLECOMPLEX = 49
PRIM_CHAR16 = 50
PRIM_CHAR32 = 51
_NUM_PRIM = 52
_UNKNOWN_PRIM = -1
_UNKNOWN_FLOAT_PRIM = -2
_UNKNOWN_LONG_DOUBLE = -3
_IO_FILE_STRUCT = -1
PRIMITIVE_TO_INDEX = {
'char': PRIM_CHAR,
'short': PRIM_SHORT,
'int': PRIM_INT,
'long': PRIM_LONG,
'long long': PRIM_LONGLONG,
'signed char': PRIM_SCHAR,
'unsigned char': PRIM_UCHAR,
'unsigned short': PRIM_USHORT,
'unsigned int': PRIM_UINT,
'unsigned long': PRIM_ULONG,
'unsigned long long': PRIM_ULONGLONG,
'float': PRIM_FLOAT,
'double': PRIM_DOUBLE,
'long double': PRIM_LONGDOUBLE,
'float _Complex': PRIM_FLOATCOMPLEX,
'double _Complex': PRIM_DOUBLECOMPLEX,
'_Bool': PRIM_BOOL,
'wchar_t': PRIM_WCHAR,
'char16_t': PRIM_CHAR16,
'char32_t': PRIM_CHAR32,
'int8_t': PRIM_INT8,
'uint8_t': PRIM_UINT8,
'int16_t': PRIM_INT16,
'uint16_t': PRIM_UINT16,
'int32_t': PRIM_INT32,
'uint32_t': PRIM_UINT32,
'int64_t': PRIM_INT64,
'uint64_t': PRIM_UINT64,
'intptr_t': PRIM_INTPTR,
'uintptr_t': PRIM_UINTPTR,
'ptrdiff_t': PRIM_PTRDIFF,
'size_t': PRIM_SIZE,
'ssize_t': PRIM_SSIZE,
'int_least8_t': PRIM_INT_LEAST8,
'uint_least8_t': PRIM_UINT_LEAST8,
'int_least16_t': PRIM_INT_LEAST16,
'uint_least16_t': PRIM_UINT_LEAST16,
'int_least32_t': PRIM_INT_LEAST32,
'uint_least32_t': PRIM_UINT_LEAST32,
'int_least64_t': PRIM_INT_LEAST64,
'uint_least64_t': PRIM_UINT_LEAST64,
'int_fast8_t': PRIM_INT_FAST8,
'uint_fast8_t': PRIM_UINT_FAST8,
'int_fast16_t': PRIM_INT_FAST16,
'uint_fast16_t': PRIM_UINT_FAST16,
'int_fast32_t': PRIM_INT_FAST32,
'uint_fast32_t': PRIM_UINT_FAST32,
'int_fast64_t': PRIM_INT_FAST64,
'uint_fast64_t': PRIM_UINT_FAST64,
'intmax_t': PRIM_INTMAX,
'uintmax_t': PRIM_UINTMAX,
}
F_UNION = 0x01
F_CHECK_FIELDS = 0x02
F_PACKED = 0x04
F_EXTERNAL = 0x08
F_OPAQUE = 0x10
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
'F_EXTERNAL', 'F_OPAQUE']])
CLASS_NAME = {}
for _name, _value in list(globals().items()):
if _name.startswith('OP_') and isinstance(_value, int):
CLASS_NAME[_value] = _name[3:]
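cffi_opcode.py is a pure data module: the recompiler encodes each parsed type as a CffiOp and serializes it either as a C expression or as four escaped bytes for the pure-Python output. A small sketch of that encoding; the values are illustrative, not taken from a real recompiler run:

# Encoding sketch (illustrative values only).
from cffi.cffi_opcode import CLASS_NAME, CffiOp, OP_PRIMITIVE, PRIM_INT

op = CffiOp(OP_PRIMITIVE, PRIM_INT)   # "the primitive type 'int'"
print(op.as_c_expr())                 # _CFFI_OP(_CFFI_OP_PRIMITIVE, 7)
print(op.as_python_bytes())           # \x00\x00\x07\x01  i.e. (PRIM_INT << 8) | OP_PRIMITIVE
print(CLASS_NAME[OP_PRIMITIVE])       # PRIMITIVE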

View File

@@ -0,0 +1,80 @@
import sys
from . import model
from .error import FFIError
COMMON_TYPES = {}
try:
# fetch "bool" and all simple Windows types
from _cffi_backend import _get_common_types
_get_common_types(COMMON_TYPES)
except ImportError:
pass
COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
if _type.endswith('_t'):
COMMON_TYPES[_type] = _type
del _type
_CACHE = {}
def resolve_common_type(parser, commontype):
try:
return _CACHE[commontype]
except KeyError:
cdecl = COMMON_TYPES.get(commontype, commontype)
if not isinstance(cdecl, str):
result, quals = cdecl, 0 # cdecl is already a BaseType
elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
result, quals = model.PrimitiveType(cdecl), 0
elif cdecl == 'set-unicode-needed':
raise FFIError("The Windows type %r is only available after "
"you call ffi.set_unicode()" % (commontype,))
else:
if commontype == cdecl:
raise FFIError(
"Unsupported type: %r. Please look at "
"http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
"and file an issue if you think this type should really "
"be supported." % (commontype,))
result, quals = parser.parse_type_and_quals(cdecl) # recursive
assert isinstance(result, model.BaseTypeByIdentity)
_CACHE[commontype] = result, quals
return result, quals
# ____________________________________________________________
# extra types for Windows (most of them are in commontypes.c)
def win_common_types():
return {
"UNICODE_STRING": model.StructType(
"_UNICODE_STRING",
["Length",
"MaximumLength",
"Buffer"],
[model.PrimitiveType("unsigned short"),
model.PrimitiveType("unsigned short"),
model.PointerType(model.PrimitiveType("wchar_t"))],
[-1, -1, -1]),
"PUNICODE_STRING": "UNICODE_STRING *",
"PCUNICODE_STRING": "const UNICODE_STRING *",
"TBYTE": "set-unicode-needed",
"TCHAR": "set-unicode-needed",
"LPCTSTR": "set-unicode-needed",
"PCTSTR": "set-unicode-needed",
"LPTSTR": "set-unicode-needed",
"PTSTR": "set-unicode-needed",
"PTBYTE": "set-unicode-needed",
"PTCHAR": "set-unicode-needed",
}
if sys.platform == 'win32':
COMMON_TYPES.update(win_common_types())
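commontypes.py is why cdef() accepts identifiers such as 'bool', 'FILE' and the <stdint.h> '*_t' names without an explicit typedef: resolve_common_type() maps them onto model types before parsing continues. A sketch of the visible effect, assuming only the table above:

# Sketch: these names need no typedef in cdef() because of COMMON_TYPES above.
from cffi import FFI

ffi = FFI()
ffi.cdef("""
    bool     is_ready(void);       /* 'bool' -> _Bool             */
    int      count_lines(FILE *);  /* 'FILE' -> opaque struct     */
    uint32_t mask(uint32_t v);     /* '*_t'  -> primitive types   */
""")
print(ffi.typeof("bool"))          # <ctype '_Bool'>
print(ffi.typeof("uint32_t"))      # <ctype 'uint32_t'>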

File diff suppressed because it is too large

View File

@@ -0,0 +1,31 @@
class FFIError(Exception):
__module__ = 'cffi'
class CDefError(Exception):
__module__ = 'cffi'
def __str__(self):
try:
current_decl = self.args[1]
filename = current_decl.coord.file
linenum = current_decl.coord.line
prefix = '%s:%d: ' % (filename, linenum)
except (AttributeError, TypeError, IndexError):
prefix = ''
return '%s%s' % (prefix, self.args[0])
class VerificationError(Exception):
""" An error raised when verification fails
"""
__module__ = 'cffi'
class VerificationMissing(Exception):
""" An error raised when incomplete structures are passed into
cdef, but no verification has been done
"""
__module__ = 'cffi'
class PkgConfigError(Exception):
""" An error raised for missing modules in pkg-config
"""
__module__ = 'cffi'
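error.py defines the exception hierarchy re-exported by the cffi package; CDefError.__str__() prepends a '<file>:<line>: ' prefix when the failing declaration carries pycparser coordinates. A minimal sketch of catching it; the exact message text is not guaranteed:

# Sketch: a malformed cdef() is reported as CDefError.
from cffi import FFI, CDefError

ffi = FFI()
try:
    ffi.cdef("this is not valid C;")
except CDefError as e:
    print("cdef rejected:", e)     # message may carry a file:line prefix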

View File

@@ -0,0 +1,127 @@
import sys, os
from .error import VerificationError
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
'extra_objects', 'depends']
def get_extension(srcfilename, modname, sources=(), **kwds):
_hack_at_distutils()
from distutils.core import Extension
allsources = [srcfilename]
for src in sources:
allsources.append(os.path.normpath(src))
return Extension(name=modname, sources=allsources, **kwds)
def compile(tmpdir, ext, compiler_verbose=0, debug=None):
"""Compile a C extension module using distutils."""
_hack_at_distutils()
saved_environ = os.environ.copy()
try:
outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
outputfilename = os.path.abspath(outputfilename)
finally:
# workaround for a distutils bug where some env vars can
# become longer and longer every time it is used
for key, value in saved_environ.items():
if os.environ.get(key) != value:
os.environ[key] = value
return outputfilename
def _build(tmpdir, ext, compiler_verbose=0, debug=None):
# XXX compact but horrible :-(
from distutils.core import Distribution
import distutils.errors, distutils.log
#
dist = Distribution({'ext_modules': [ext]})
dist.parse_config_files()
options = dist.get_option_dict('build_ext')
if debug is None:
debug = sys.flags.debug
options['debug'] = ('ffiplatform', debug)
options['force'] = ('ffiplatform', True)
options['build_lib'] = ('ffiplatform', tmpdir)
options['build_temp'] = ('ffiplatform', tmpdir)
#
try:
old_level = distutils.log.set_threshold(0) or 0
try:
distutils.log.set_verbosity(compiler_verbose)
dist.run_command('build_ext')
cmd_obj = dist.get_command_obj('build_ext')
[soname] = cmd_obj.get_outputs()
finally:
distutils.log.set_threshold(old_level)
except (distutils.errors.CompileError,
distutils.errors.LinkError) as e:
raise VerificationError('%s: %s' % (e.__class__.__name__, e))
#
return soname
try:
from os.path import samefile
except ImportError:
def samefile(f1, f2):
return os.path.abspath(f1) == os.path.abspath(f2)
def maybe_relative_path(path):
if not os.path.isabs(path):
return path # already relative
dir = path
names = []
while True:
prevdir = dir
dir, name = os.path.split(prevdir)
if dir == prevdir or not dir:
return path # failed to make it relative
names.append(name)
try:
if samefile(dir, os.curdir):
names.reverse()
return os.path.join(*names)
except OSError:
pass
# ____________________________________________________________
try:
int_or_long = (int, long)
import cStringIO
except NameError:
int_or_long = int # Python 3
import io as cStringIO
def _flatten(x, f):
if isinstance(x, str):
f.write('%ds%s' % (len(x), x))
elif isinstance(x, dict):
keys = sorted(x.keys())
f.write('%dd' % len(keys))
for key in keys:
_flatten(key, f)
_flatten(x[key], f)
elif isinstance(x, (list, tuple)):
f.write('%dl' % len(x))
for value in x:
_flatten(value, f)
elif isinstance(x, int_or_long):
f.write('%di' % (x,))
else:
raise TypeError(
"the keywords to verify() contains unsupported object %r" % (x,))
def flatten(x):
f = cStringIO.StringIO()
_flatten(x, f)
return f.getvalue()
def _hack_at_distutils():
# Windows-only workaround for some configurations: see
# https://bugs.python.org/issue23246 (Python 2.7 with
# a specific MS compiler suite download)
if sys.platform == "win32":
try:
import setuptools # for side-effects, patches distutils
except ImportError:
pass
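ffiplatform.py wraps distutils: get_extension() and compile() build the generated C source, while flatten() serializes the verify() keyword arguments into a deterministic string that the verifier can hash. The encoding is simple enough to trace by hand:

# Sketch of flatten(): a length-prefixed, type-tagged serialization.
from cffi.ffiplatform import flatten

print(flatten({'libraries': ['m']}))
# -> 1d9slibraries1l1sm
#    1d           one-key dict
#    9slibraries  9-char string 'libraries'
#    1l           one-element list
#    1sm          1-char string 'm'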

View File

@@ -0,0 +1,30 @@
import sys
if sys.version_info < (3,):
try:
from thread import allocate_lock
except ImportError:
from dummy_thread import allocate_lock
else:
try:
from _thread import allocate_lock
except ImportError:
from _dummy_thread import allocate_lock
##import sys
##l1 = allocate_lock
##class allocate_lock(object):
## def __init__(self):
## self._real = l1()
## def __enter__(self):
## for i in range(4, 0, -1):
## print sys._getframe(i).f_code
## print
## return self._real.__enter__()
## def __exit__(self, *args):
## return self._real.__exit__(*args)
## def acquire(self, f):
## assert f is False
## return self._real.acquire(f)
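lock.py only selects the right allocate_lock for the running interpreter (thread/_thread, with the dummy-thread fallback); the '##' block is a leftover debugging shim. This is the lock behind FFI._lock and init_once() above:

# Sketch: the same primitive the FFI object uses internally.
from cffi.lock import allocate_lock

_lock = allocate_lock()        # a plain _thread lock on Python 3
with _lock:
    pass                       # critical section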

View File

@@ -0,0 +1,617 @@
import types
import weakref
from .lock import allocate_lock
from .error import CDefError, VerificationError, VerificationMissing
# type qualifiers
Q_CONST = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04
def qualify(quals, replace_with):
if quals & Q_CONST:
replace_with = ' const ' + replace_with.lstrip()
if quals & Q_VOLATILE:
replace_with = ' volatile ' + replace_with.lstrip()
if quals & Q_RESTRICT:
# It seems that __restrict is supported by gcc and msvc.
# If you hit some different compiler, add a #define in
# _cffi_include.h for it (and in its copies, documented there)
replace_with = ' __restrict ' + replace_with.lstrip()
return replace_with
class BaseTypeByIdentity(object):
is_array_type = False
is_raw_function = False
def get_c_name(self, replace_with='', context='a C file', quals=0):
result = self.c_name_with_marker
assert result.count('&') == 1
# some logic duplication with ffi.getctype()... :-(
replace_with = replace_with.strip()
if replace_with:
if replace_with.startswith('*') and '&[' in result:
replace_with = '(%s)' % replace_with
elif not replace_with[0] in '[(':
replace_with = ' ' + replace_with
replace_with = qualify(quals, replace_with)
result = result.replace('&', replace_with)
if '$' in result:
raise VerificationError(
"cannot generate '%s' in %s: unknown type name"
% (self._get_c_name(), context))
return result
def _get_c_name(self):
return self.c_name_with_marker.replace('&', '')
def has_c_name(self):
return '$' not in self._get_c_name()
def is_integer_type(self):
return False
def get_cached_btype(self, ffi, finishlist, can_delay=False):
try:
BType = ffi._cached_btypes[self]
except KeyError:
BType = self.build_backend_type(ffi, finishlist)
BType2 = ffi._cached_btypes.setdefault(self, BType)
assert BType2 is BType
return BType
def __repr__(self):
return '<%s>' % (self._get_c_name(),)
def _get_items(self):
return [(name, getattr(self, name)) for name in self._attrs_]
class BaseType(BaseTypeByIdentity):
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self._get_items() == other._get_items())
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((self.__class__, tuple(self._get_items())))
class VoidType(BaseType):
_attrs_ = ()
def __init__(self):
self.c_name_with_marker = 'void&'
def build_backend_type(self, ffi, finishlist):
return global_cache(self, ffi, 'new_void_type')
void_type = VoidType()
class BasePrimitiveType(BaseType):
def is_complex_type(self):
return False
class PrimitiveType(BasePrimitiveType):
_attrs_ = ('name',)
ALL_PRIMITIVE_TYPES = {
'char': 'c',
'short': 'i',
'int': 'i',
'long': 'i',
'long long': 'i',
'signed char': 'i',
'unsigned char': 'i',
'unsigned short': 'i',
'unsigned int': 'i',
'unsigned long': 'i',
'unsigned long long': 'i',
'float': 'f',
'double': 'f',
'long double': 'f',
'float _Complex': 'j',
'double _Complex': 'j',
'_Bool': 'i',
# the following types are not primitive in the C sense
'wchar_t': 'c',
'char16_t': 'c',
'char32_t': 'c',
'int8_t': 'i',
'uint8_t': 'i',
'int16_t': 'i',
'uint16_t': 'i',
'int32_t': 'i',
'uint32_t': 'i',
'int64_t': 'i',
'uint64_t': 'i',
'int_least8_t': 'i',
'uint_least8_t': 'i',
'int_least16_t': 'i',
'uint_least16_t': 'i',
'int_least32_t': 'i',
'uint_least32_t': 'i',
'int_least64_t': 'i',
'uint_least64_t': 'i',
'int_fast8_t': 'i',
'uint_fast8_t': 'i',
'int_fast16_t': 'i',
'uint_fast16_t': 'i',
'int_fast32_t': 'i',
'uint_fast32_t': 'i',
'int_fast64_t': 'i',
'uint_fast64_t': 'i',
'intptr_t': 'i',
'uintptr_t': 'i',
'intmax_t': 'i',
'uintmax_t': 'i',
'ptrdiff_t': 'i',
'size_t': 'i',
'ssize_t': 'i',
}
def __init__(self, name):
assert name in self.ALL_PRIMITIVE_TYPES
self.name = name
self.c_name_with_marker = name + '&'
def is_char_type(self):
return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
def is_integer_type(self):
return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
def is_float_type(self):
return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
def is_complex_type(self):
return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
def build_backend_type(self, ffi, finishlist):
return global_cache(self, ffi, 'new_primitive_type', self.name)
class UnknownIntegerType(BasePrimitiveType):
_attrs_ = ('name',)
def __init__(self, name):
self.name = name
self.c_name_with_marker = name + '&'
def is_integer_type(self):
return True
def build_backend_type(self, ffi, finishlist):
raise NotImplementedError("integer type '%s' can only be used after "
"compilation" % self.name)
class UnknownFloatType(BasePrimitiveType):
_attrs_ = ('name', )
def __init__(self, name):
self.name = name
self.c_name_with_marker = name + '&'
def build_backend_type(self, ffi, finishlist):
raise NotImplementedError("float type '%s' can only be used after "
"compilation" % self.name)
class BaseFunctionType(BaseType):
_attrs_ = ('args', 'result', 'ellipsis', 'abi')
def __init__(self, args, result, ellipsis, abi=None):
self.args = args
self.result = result
self.ellipsis = ellipsis
self.abi = abi
#
reprargs = [arg._get_c_name() for arg in self.args]
if self.ellipsis:
reprargs.append('...')
reprargs = reprargs or ['void']
replace_with = self._base_pattern % (', '.join(reprargs),)
if abi is not None:
replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
self.c_name_with_marker = (
self.result.c_name_with_marker.replace('&', replace_with))
class RawFunctionType(BaseFunctionType):
# Corresponds to a C type like 'int(int)', which is the C type of
# a function, but not a pointer-to-function. The backend has no
# notion of such a type; it's used temporarily by parsing.
_base_pattern = '(&)(%s)'
is_raw_function = True
def build_backend_type(self, ffi, finishlist):
raise CDefError("cannot render the type %r: it is a function "
"type, not a pointer-to-function type" % (self,))
def as_function_pointer(self):
return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
class FunctionPtrType(BaseFunctionType):
_base_pattern = '(*&)(%s)'
def build_backend_type(self, ffi, finishlist):
result = self.result.get_cached_btype(ffi, finishlist)
args = []
for tp in self.args:
args.append(tp.get_cached_btype(ffi, finishlist))
abi_args = ()
if self.abi == "__stdcall":
if not self.ellipsis: # __stdcall ignored for variadic funcs
try:
abi_args = (ffi._backend.FFI_STDCALL,)
except AttributeError:
pass
return global_cache(self, ffi, 'new_function_type',
tuple(args), result, self.ellipsis, *abi_args)
def as_raw_function(self):
return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
class PointerType(BaseType):
_attrs_ = ('totype', 'quals')
def __init__(self, totype, quals=0):
self.totype = totype
self.quals = quals
extra = qualify(quals, " *&")
if totype.is_array_type:
extra = "(%s)" % (extra.lstrip(),)
self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
def build_backend_type(self, ffi, finishlist):
BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
return global_cache(self, ffi, 'new_pointer_type', BItem)
voidp_type = PointerType(void_type)
def ConstPointerType(totype):
return PointerType(totype, Q_CONST)
const_voidp_type = ConstPointerType(void_type)
class NamedPointerType(PointerType):
_attrs_ = ('totype', 'name')
def __init__(self, totype, name, quals=0):
PointerType.__init__(self, totype, quals)
self.name = name
self.c_name_with_marker = name + '&'
class ArrayType(BaseType):
_attrs_ = ('item', 'length')
is_array_type = True
def __init__(self, item, length):
self.item = item
self.length = length
#
if length is None:
brackets = '&[]'
elif length == '...':
brackets = '&[/*...*/]'
else:
brackets = '&[%s]' % length
self.c_name_with_marker = (
self.item.c_name_with_marker.replace('&', brackets))
def length_is_unknown(self):
return isinstance(self.length, str)
def resolve_length(self, newlength):
return ArrayType(self.item, newlength)
def build_backend_type(self, ffi, finishlist):
if self.length_is_unknown():
raise CDefError("cannot render the type %r: unknown length" %
(self,))
self.item.get_cached_btype(ffi, finishlist) # force the item BType
BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
char_array_type = ArrayType(PrimitiveType('char'), None)
class StructOrUnionOrEnum(BaseTypeByIdentity):
_attrs_ = ('name',)
forcename = None
def build_c_name_with_marker(self):
name = self.forcename or '%s %s' % (self.kind, self.name)
self.c_name_with_marker = name + '&'
def force_the_name(self, forcename):
self.forcename = forcename
self.build_c_name_with_marker()
def get_official_name(self):
assert self.c_name_with_marker.endswith('&')
return self.c_name_with_marker[:-1]
class StructOrUnion(StructOrUnionOrEnum):
fixedlayout = None
completed = 0
partial = False
packed = 0
def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
self.name = name
self.fldnames = fldnames
self.fldtypes = fldtypes
self.fldbitsize = fldbitsize
self.fldquals = fldquals
self.build_c_name_with_marker()
def anonymous_struct_fields(self):
if self.fldtypes is not None:
for name, type in zip(self.fldnames, self.fldtypes):
if name == '' and isinstance(type, StructOrUnion):
yield type
def enumfields(self, expand_anonymous_struct_union=True):
fldquals = self.fldquals
if fldquals is None:
fldquals = (0,) * len(self.fldnames)
for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
self.fldbitsize, fldquals):
if (name == '' and isinstance(type, StructOrUnion)
and expand_anonymous_struct_union):
# nested anonymous struct/union
for result in type.enumfields():
yield result
else:
yield (name, type, bitsize, quals)
def force_flatten(self):
# force the struct or union to have a declaration that lists
# directly all fields returned by enumfields(), flattening
# nested anonymous structs/unions.
names = []
types = []
bitsizes = []
fldquals = []
for name, type, bitsize, quals in self.enumfields():
names.append(name)
types.append(type)
bitsizes.append(bitsize)
fldquals.append(quals)
self.fldnames = tuple(names)
self.fldtypes = tuple(types)
self.fldbitsize = tuple(bitsizes)
self.fldquals = tuple(fldquals)
def get_cached_btype(self, ffi, finishlist, can_delay=False):
BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
can_delay)
if not can_delay:
self.finish_backend_type(ffi, finishlist)
return BType
def finish_backend_type(self, ffi, finishlist):
if self.completed:
if self.completed != 2:
raise NotImplementedError("recursive structure declaration "
"for '%s'" % (self.name,))
return
BType = ffi._cached_btypes[self]
#
self.completed = 1
#
if self.fldtypes is None:
pass # not completing it: it's an opaque struct
#
elif self.fixedlayout is None:
fldtypes = [tp.get_cached_btype(ffi, finishlist)
for tp in self.fldtypes]
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
extra_flags = ()
if self.packed:
if self.packed == 1:
extra_flags = (8,) # SF_PACKED
else:
extra_flags = (0, self.packed)
ffi._backend.complete_struct_or_union(BType, lst, self,
-1, -1, *extra_flags)
#
else:
fldtypes = []
fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
for i in range(len(self.fldnames)):
fsize = fieldsize[i]
ftype = self.fldtypes[i]
#
if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
# fix the length to match the total size
BItemType = ftype.item.get_cached_btype(ffi, finishlist)
nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
if nrest != 0:
self._verification_error(
"field '%s.%s' has a bogus size?" % (
self.name, self.fldnames[i] or '{}'))
ftype = ftype.resolve_length(nlen)
self.fldtypes = (self.fldtypes[:i] + (ftype,) +
self.fldtypes[i+1:])
#
BFieldType = ftype.get_cached_btype(ffi, finishlist)
if isinstance(ftype, ArrayType) and ftype.length is None:
assert fsize == 0
else:
bitemsize = ffi.sizeof(BFieldType)
if bitemsize != fsize:
self._verification_error(
"field '%s.%s' is declared as %d bytes, but is "
"really %d bytes" % (self.name,
self.fldnames[i] or '{}',
bitemsize, fsize))
fldtypes.append(BFieldType)
#
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
ffi._backend.complete_struct_or_union(BType, lst, self,
totalsize, totalalignment)
self.completed = 2
def _verification_error(self, msg):
raise VerificationError(msg)
def check_not_partial(self):
if self.partial and self.fixedlayout is None:
raise VerificationMissing(self._get_c_name())
def build_backend_type(self, ffi, finishlist):
self.check_not_partial()
finishlist.append(self)
#
return global_cache(self, ffi, 'new_%s_type' % self.kind,
self.get_official_name(), key=self)
class StructType(StructOrUnion):
kind = 'struct'
class UnionType(StructOrUnion):
kind = 'union'
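# Illustrative sketch, not part of this module: how enumfields() expands an
# anonymous nested struct/union and how force_flatten() makes that expansion
# permanent.  The struct and field names below are made up.
_sketch_inner = StructType('$1', ('x', 'y'),
                           (PrimitiveType('int'), PrimitiveType('int')),
                           (-1, -1))
_sketch_outer = StructType('outer', ('a', ''),
                           (PrimitiveType('int'), _sketch_inner),
                           (-1, -1))
assert [f[0] for f in _sketch_outer.enumfields()] == ['a', 'x', 'y']
_sketch_outer.force_flatten()
assert _sketch_outer.fldnames == ('a', 'x', 'y')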
class EnumType(StructOrUnionOrEnum):
kind = 'enum'
partial = False
partial_resolved = False
def __init__(self, name, enumerators, enumvalues, baseinttype=None):
self.name = name
self.enumerators = enumerators
self.enumvalues = enumvalues
self.baseinttype = baseinttype
self.build_c_name_with_marker()
def force_the_name(self, forcename):
StructOrUnionOrEnum.force_the_name(self, forcename)
if self.forcename is None:
name = self.get_official_name()
self.forcename = '$' + name.replace(' ', '_')
def check_not_partial(self):
if self.partial and not self.partial_resolved:
raise VerificationMissing(self._get_c_name())
def build_backend_type(self, ffi, finishlist):
self.check_not_partial()
base_btype = self.build_baseinttype(ffi, finishlist)
return global_cache(self, ffi, 'new_enum_type',
self.get_official_name(),
self.enumerators, self.enumvalues,
base_btype, key=self)
def build_baseinttype(self, ffi, finishlist):
if self.baseinttype is not None:
return self.baseinttype.get_cached_btype(ffi, finishlist)
#
if self.enumvalues:
smallest_value = min(self.enumvalues)
largest_value = max(self.enumvalues)
else:
import warnings
try:
# XXX! The goal is to ensure that the warnings.warn()
# will not suppress the warning. We want to get it
# several times if we reach this point several times.
__warningregistry__.clear()
except NameError:
pass
warnings.warn("%r has no values explicitly defined; "
"guessing that it is equivalent to 'unsigned int'"
% self._get_c_name())
smallest_value = largest_value = 0
if smallest_value < 0: # needs a signed type
sign = 1
candidate1 = PrimitiveType("int")
candidate2 = PrimitiveType("long")
else:
sign = 0
candidate1 = PrimitiveType("unsigned int")
candidate2 = PrimitiveType("unsigned long")
btype1 = candidate1.get_cached_btype(ffi, finishlist)
btype2 = candidate2.get_cached_btype(ffi, finishlist)
size1 = ffi.sizeof(btype1)
size2 = ffi.sizeof(btype2)
if (smallest_value >= ((-1) << (8*size1-1)) and
largest_value < (1 << (8*size1-sign))):
return btype1
if (smallest_value >= ((-1) << (8*size2-1)) and
largest_value < (1 << (8*size2-sign))):
return btype2
raise CDefError("%s values don't all fit into either 'long' "
"or 'unsigned long'" % self._get_c_name())
def unknown_type(name, structname=None):
if structname is None:
structname = '$%s' % name
tp = StructType(structname, None, None, None)
tp.force_the_name(name)
tp.origin = "unknown_type"
return tp
def unknown_ptr_type(name, structname=None):
if structname is None:
structname = '$$%s' % name
tp = StructType(structname, None, None, None)
return NamedPointerType(tp, name)
global_lock = allocate_lock()
_typecache_cffi_backend = weakref.WeakValueDictionary()
def get_typecache(backend):
# returns _typecache_cffi_backend if backend is the _cffi_backend
# module, or type(backend).__typecache if backend is an instance of
# CTypesBackend (or some FakeBackend class during tests)
if isinstance(backend, types.ModuleType):
return _typecache_cffi_backend
with global_lock:
if not hasattr(type(backend), '__typecache'):
type(backend).__typecache = weakref.WeakValueDictionary()
return type(backend).__typecache
def global_cache(srctype, ffi, funcname, *args, **kwds):
key = kwds.pop('key', (funcname, args))
assert not kwds
try:
return ffi._typecache[key]
except KeyError:
pass
try:
res = getattr(ffi._backend, funcname)(*args)
except NotImplementedError as e:
raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
# note that setdefault() on WeakValueDictionary is not atomic
# and contains a rare bug (http://bugs.python.org/issue19542);
# we have to use a lock and do it ourselves
cache = ffi._typecache
with global_lock:
res1 = cache.get(key)
if res1 is None:
cache[key] = res
return res
else:
return res1
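# Minimal standalone sketch, not cffi code: the same double-checked caching
# pattern as global_cache() above.  The result is built outside the lock, then
# exactly one result wins the cache slot, because WeakValueDictionary's
# setdefault() is not atomic (see the comment above).
import threading
_sketch_lock = threading.Lock()
def _sketch_cached_build(cache, key, build):
    try:
        return cache[key]        # fast path, no lock taken
    except KeyError:
        pass
    res = build()                # may run concurrently in several threads
    with _sketch_lock:
        winner = cache.get(key)
        if winner is None:       # first thread to get here publishes its result
            cache[key] = res
            winner = res
    return winner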
def pointer_cache(ffi, BType):
return global_cache('?', ffi, 'new_pointer_type', BType)
def attach_exception_info(e, name):
if e.args and type(e.args[0]) is str:
e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]

View File

@@ -0,0 +1,181 @@
/* This part is from file 'cffi/parse_c_type.h'. It is copied at the
beginning of C sources generated by CFFI's ffi.set_source(). */
typedef void *_cffi_opcode_t;
#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
#define _CFFI_OP_PRIMITIVE 1
#define _CFFI_OP_POINTER 3
#define _CFFI_OP_ARRAY 5
#define _CFFI_OP_OPEN_ARRAY 7
#define _CFFI_OP_STRUCT_UNION 9
#define _CFFI_OP_ENUM 11
#define _CFFI_OP_FUNCTION 13
#define _CFFI_OP_FUNCTION_END 15
#define _CFFI_OP_NOOP 17
#define _CFFI_OP_BITFIELD 19
#define _CFFI_OP_TYPENAME 21
#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
#define _CFFI_OP_CONSTANT 29
#define _CFFI_OP_CONSTANT_INT 31
#define _CFFI_OP_GLOBAL_VAR 33
#define _CFFI_OP_DLOPEN_FUNC 35
#define _CFFI_OP_DLOPEN_CONST 37
#define _CFFI_OP_GLOBAL_VAR_F 39
#define _CFFI_OP_EXTERN_PYTHON 41
#define _CFFI_PRIM_VOID 0
#define _CFFI_PRIM_BOOL 1
#define _CFFI_PRIM_CHAR 2
#define _CFFI_PRIM_SCHAR 3
#define _CFFI_PRIM_UCHAR 4
#define _CFFI_PRIM_SHORT 5
#define _CFFI_PRIM_USHORT 6
#define _CFFI_PRIM_INT 7
#define _CFFI_PRIM_UINT 8
#define _CFFI_PRIM_LONG 9
#define _CFFI_PRIM_ULONG 10
#define _CFFI_PRIM_LONGLONG 11
#define _CFFI_PRIM_ULONGLONG 12
#define _CFFI_PRIM_FLOAT 13
#define _CFFI_PRIM_DOUBLE 14
#define _CFFI_PRIM_LONGDOUBLE 15
#define _CFFI_PRIM_WCHAR 16
#define _CFFI_PRIM_INT8 17
#define _CFFI_PRIM_UINT8 18
#define _CFFI_PRIM_INT16 19
#define _CFFI_PRIM_UINT16 20
#define _CFFI_PRIM_INT32 21
#define _CFFI_PRIM_UINT32 22
#define _CFFI_PRIM_INT64 23
#define _CFFI_PRIM_UINT64 24
#define _CFFI_PRIM_INTPTR 25
#define _CFFI_PRIM_UINTPTR 26
#define _CFFI_PRIM_PTRDIFF 27
#define _CFFI_PRIM_SIZE 28
#define _CFFI_PRIM_SSIZE 29
#define _CFFI_PRIM_INT_LEAST8 30
#define _CFFI_PRIM_UINT_LEAST8 31
#define _CFFI_PRIM_INT_LEAST16 32
#define _CFFI_PRIM_UINT_LEAST16 33
#define _CFFI_PRIM_INT_LEAST32 34
#define _CFFI_PRIM_UINT_LEAST32 35
#define _CFFI_PRIM_INT_LEAST64 36
#define _CFFI_PRIM_UINT_LEAST64 37
#define _CFFI_PRIM_INT_FAST8 38
#define _CFFI_PRIM_UINT_FAST8 39
#define _CFFI_PRIM_INT_FAST16 40
#define _CFFI_PRIM_UINT_FAST16 41
#define _CFFI_PRIM_INT_FAST32 42
#define _CFFI_PRIM_UINT_FAST32 43
#define _CFFI_PRIM_INT_FAST64 44
#define _CFFI_PRIM_UINT_FAST64 45
#define _CFFI_PRIM_INTMAX 46
#define _CFFI_PRIM_UINTMAX 47
#define _CFFI_PRIM_FLOATCOMPLEX 48
#define _CFFI_PRIM_DOUBLECOMPLEX 49
#define _CFFI_PRIM_CHAR16 50
#define _CFFI_PRIM_CHAR32 51
#define _CFFI__NUM_PRIM 52
#define _CFFI__UNKNOWN_PRIM (-1)
#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
#define _CFFI__IO_FILE_STRUCT (-1)
struct _cffi_global_s {
const char *name;
void *address;
_cffi_opcode_t type_op;
void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
// OP_CPYTHON_BLTN_*: addr of direct function
};
struct _cffi_getconst_s {
unsigned long long value;
const struct _cffi_type_context_s *ctx;
int gindex;
};
struct _cffi_struct_union_s {
const char *name;
int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
int flags; // _CFFI_F_* flags below
size_t size;
int alignment;
int first_field_index; // -> _cffi_fields array
int num_fields;
};
#define _CFFI_F_UNION 0x01 // is a union, not a struct
#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
// "standard layout" or if some are missing
#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
#define _CFFI_F_OPAQUE 0x10 // opaque
struct _cffi_field_s {
const char *name;
size_t field_offset;
size_t field_size;
_cffi_opcode_t field_type_op;
};
struct _cffi_enum_s {
const char *name;
int type_index; // -> _cffi_types, on a OP_ENUM
int type_prim; // _CFFI_PRIM_xxx
const char *enumerators; // comma-delimited string
};
struct _cffi_typename_s {
const char *name;
int type_index; /* if opaque, points to a possibly artificial
OP_STRUCT which is itself opaque */
};
struct _cffi_type_context_s {
_cffi_opcode_t *types;
const struct _cffi_global_s *globals;
const struct _cffi_field_s *fields;
const struct _cffi_struct_union_s *struct_unions;
const struct _cffi_enum_s *enums;
const struct _cffi_typename_s *typenames;
int num_globals;
int num_struct_unions;
int num_enums;
int num_typenames;
const char *const *includes;
int num_types;
int flags; /* future extension */
};
struct _cffi_parse_info_s {
const struct _cffi_type_context_s *ctx;
_cffi_opcode_t *output;
unsigned int output_size;
size_t error_location;
const char *error_message;
};
struct _cffi_externpy_s {
const char *name;
size_t size_of_result;
void *reserved1, *reserved2;
};
#ifdef _CFFI_INTERNAL
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
static int search_in_globals(const struct _cffi_type_context_s *ctx,
const char *search, size_t search_len);
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
const char *search, size_t search_len);
#endif
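# Illustrative Python mirror, not part of this header: what the _CFFI_OP,
# _CFFI_GETOP and _CFFI_GETARG macros above compute.  The opcode lives in the
# low 8 bits of the word and its argument in the remaining bits.
def _sketch_op(opcode, arg):
    return opcode | (arg << 8)
def _sketch_getop(word):
    return word & 0xff
def _sketch_getarg(word):
    return word >> 8
_sketch_word = _sketch_op(9, 3)      # _CFFI_OP_STRUCT_UNION pointing at struct index 3
assert _sketch_getop(_sketch_word) == 9
assert _sketch_getarg(_sketch_word) == 3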

View File

@@ -0,0 +1,121 @@
# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
import sys, os, subprocess
from .error import PkgConfigError
def merge_flags(cfg1, cfg2):
"""Merge values from cffi config flags cfg2 to cf1
Example:
merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
{"libraries": ["one", "two"]}
"""
for key, value in cfg2.items():
if key not in cfg1:
cfg1[key] = value
else:
if not isinstance(cfg1[key], list):
raise TypeError("cfg1[%r] should be a list of strings" % (key,))
if not isinstance(value, list):
raise TypeError("cfg2[%r] should be a list of strings" % (key,))
cfg1[key].extend(value)
return cfg1
def call(libname, flag, encoding=sys.getfilesystemencoding()):
"""Calls pkg-config and returns the output if found
"""
a = ["pkg-config", "--print-errors"]
a.append(flag)
a.append(libname)
try:
pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except EnvironmentError as e:
raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
bout, berr = pc.communicate()
if pc.returncode != 0:
try:
berr = berr.decode(encoding)
except Exception:
pass
raise PkgConfigError(berr.strip())
if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x
try:
bout = bout.decode(encoding)
except UnicodeDecodeError:
raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
"be decoded with encoding %r:\n%r" %
(flag, libname, encoding, bout))
if os.altsep != '\\' and '\\' in bout:
raise PkgConfigError("pkg-config %s %s returned an unsupported "
"backslash-escaped output:\n%r" %
(flag, libname, bout))
return bout
def flags_from_pkgconfig(libs):
r"""Return compiler line flags for FFI.set_source based on pkg-config output
Usage
...
ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
If pkg-config is installed on build machine, then arguments include_dirs,
library_dirs, libraries, define_macros, extra_compile_args and
extra_link_args are extended with an output of pkg-config for libfoo and
libbar.
Raises PkgConfigError in case the pkg-config call fails.
"""
def get_include_dirs(string):
return [x[2:] for x in string.split() if x.startswith("-I")]
def get_library_dirs(string):
return [x[2:] for x in string.split() if x.startswith("-L")]
def get_libraries(string):
return [x[2:] for x in string.split() if x.startswith("-l")]
# convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
def get_macros(string):
def _macro(x):
x = x[2:] # drop "-D"
if '=' in x:
return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar")
else:
return (x, None) # "-Dfoo" => ("foo", None)
return [_macro(x) for x in string.split() if x.startswith("-D")]
def get_other_cflags(string):
return [x for x in string.split() if not x.startswith("-I") and
not x.startswith("-D")]
def get_other_libs(string):
return [x for x in string.split() if not x.startswith("-L") and
not x.startswith("-l")]
# return kwargs for given libname
def kwargs(libname):
fse = sys.getfilesystemencoding()
all_cflags = call(libname, "--cflags")
all_libs = call(libname, "--libs")
return {
"include_dirs": get_include_dirs(all_cflags),
"library_dirs": get_library_dirs(all_libs),
"libraries": get_libraries(all_libs),
"define_macros": get_macros(all_cflags),
"extra_compile_args": get_other_cflags(all_cflags),
"extra_link_args": get_other_libs(all_libs),
}
# merge all arguments together
ret = {}
for libname in libs:
lib_flags = kwargs(libname)
merge_flags(ret, lib_flags)
return ret
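# Illustrative sketch, not part of this module: how the helpers above split a
# hypothetical pkg-config output.  The flag strings are made up.
_sketch_cflags = "-I/usr/include/foo -DFOO_STATIC -O2"
_sketch_libs = "-L/usr/lib/foo -lfoo -pthread"
assert [x[2:] for x in _sketch_cflags.split() if x.startswith("-I")] == ["/usr/include/foo"]
assert [x[2:] for x in _sketch_libs.split() if x.startswith("-L")] == ["/usr/lib/foo"]
assert [x[2:] for x in _sketch_libs.split() if x.startswith("-l")] == ["foo"]
# "-DFOO_STATIC" becomes the macro ("FOO_STATIC", None); "-O2" and "-pthread"
# end up in extra_compile_args and extra_link_args respectively.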

File diff suppressed because it is too large

View File

@@ -0,0 +1,219 @@
import os
import sys
try:
basestring
except NameError:
# Python 3.x
basestring = str
def error(msg):
from distutils.errors import DistutilsSetupError
raise DistutilsSetupError(msg)
def execfile(filename, glob):
# We use execfile() (here rewritten for Python 3) instead of
# __import__() to load the build script. The problem with
# a normal import is that in some packages, the intermediate
# __init__.py files may already try to import the file that
# we are generating.
with open(filename) as f:
src = f.read()
src += '\n' # Python 2.6 compatibility
code = compile(src, filename, 'exec')
exec(code, glob, glob)
def add_cffi_module(dist, mod_spec):
from cffi.api import FFI
if not isinstance(mod_spec, basestring):
error("argument to 'cffi_modules=...' must be a str or a list of str,"
" not %r" % (type(mod_spec).__name__,))
mod_spec = str(mod_spec)
try:
build_file_name, ffi_var_name = mod_spec.split(':')
except ValueError:
error("%r must be of the form 'path/build.py:ffi_variable'" %
(mod_spec,))
if not os.path.exists(build_file_name):
ext = ''
rewritten = build_file_name.replace('.', '/') + '.py'
if os.path.exists(rewritten):
ext = ' (rewrite cffi_modules to [%r])' % (
rewritten + ':' + ffi_var_name,)
error("%r does not name an existing file%s" % (build_file_name, ext))
mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
execfile(build_file_name, mod_vars)
try:
ffi = mod_vars[ffi_var_name]
except KeyError:
error("%r: object %r not found in module" % (mod_spec,
ffi_var_name))
if not isinstance(ffi, FFI):
ffi = ffi() # maybe it's a function instead of directly an ffi
if not isinstance(ffi, FFI):
error("%r is not an FFI instance (got %r)" % (mod_spec,
type(ffi).__name__))
if not hasattr(ffi, '_assigned_source'):
error("%r: the set_source() method was not called" % (mod_spec,))
module_name, source, source_extension, kwds = ffi._assigned_source
if ffi._windows_unicode:
kwds = kwds.copy()
ffi._apply_windows_unicode(kwds)
if source is None:
_add_py_module(dist, ffi, module_name)
else:
_add_c_module(dist, ffi, module_name, source, source_extension, kwds)
def _set_py_limited_api(Extension, kwds):
"""
Add py_limited_api to kwds if setuptools >= 26 is in use.
Do not alter the setting if it already exists.
Setuptools takes care of ignoring the flag on Python 2 and PyPy.
CPython itself should ignore the flag in a debugging version
(by not listing .abi3.so in the extensions it supports), but
it doesn't so far, creating troubles. That's why we check
for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
On Windows, with CPython <= 3.4, it's better not to use py_limited_api
because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
Recently (2020) we started shipping only >= 3.5 wheels, though. So
we'll give it another try and set py_limited_api on Windows >= 3.5.
"""
from cffi import recompiler
if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
and recompiler.USE_LIMITED_API):
import setuptools
try:
setuptools_major_version = int(setuptools.__version__.partition('.')[0])
if setuptools_major_version >= 26:
kwds['py_limited_api'] = True
except ValueError: # certain development versions of setuptools
# If we don't know the version number of setuptools, we
# try to set 'py_limited_api' anyway. At worst, we get a
# warning.
kwds['py_limited_api'] = True
return kwds
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
from distutils.core import Extension
# We are a setuptools extension. Need this build_ext for py_limited_api.
from setuptools.command.build_ext import build_ext
from distutils.dir_util import mkpath
from distutils import log
from cffi import recompiler
allsources = ['$PLACEHOLDER']
allsources.extend(kwds.pop('sources', []))
kwds = _set_py_limited_api(Extension, kwds)
ext = Extension(name=module_name, sources=allsources, **kwds)
def make_mod(tmpdir, pre_run=None):
c_file = os.path.join(tmpdir, module_name + source_extension)
log.info("generating cffi module %r" % c_file)
mkpath(tmpdir)
# a setuptools-only, API-only hook: called with the "ext" and "ffi"
# arguments just before we turn the ffi into C code. To use it,
# subclass the 'distutils.command.build_ext.build_ext' class and
# add a method 'def pre_run(self, ext, ffi)'.
if pre_run is not None:
pre_run(ext, ffi)
updated = recompiler.make_c_source(ffi, module_name, source, c_file)
if not updated:
log.info("already up-to-date")
return c_file
if dist.ext_modules is None:
dist.ext_modules = []
dist.ext_modules.append(ext)
base_class = dist.cmdclass.get('build_ext', build_ext)
class build_ext_make_mod(base_class):
def run(self):
if ext.sources[0] == '$PLACEHOLDER':
pre_run = getattr(self, 'pre_run', None)
ext.sources[0] = make_mod(self.build_temp, pre_run)
base_class.run(self)
dist.cmdclass['build_ext'] = build_ext_make_mod
# NB. multiple runs here will create multiple 'build_ext_make_mod'
# classes. Even in this case the 'build_ext' command should be
# run once; but just in case, the logic above does nothing if
# called again.
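# Illustrative sketch, not part of this module: a hypothetical setup.py that
# uses the 'cffi_modules' keyword handled below together with the pre_run hook
# described above.  The names 'example_build.py' and 'ffibuilder' are made up.
from setuptools import setup
from setuptools.command.build_ext import build_ext as _sketch_build_ext

class _SketchBuildExt(_sketch_build_ext):
    def pre_run(self, ext, ffi):
        # called with the Extension and the FFI just before C code generation
        print("about to generate C code for", ext.name)

setup(
    name="example",
    setup_requires=["cffi"],
    cffi_modules=["example_build.py:ffibuilder"],
    cmdclass={"build_ext": _SketchBuildExt},
)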
def _add_py_module(dist, ffi, module_name):
from distutils.dir_util import mkpath
from setuptools.command.build_py import build_py
from setuptools.command.build_ext import build_ext
from distutils import log
from cffi import recompiler
def generate_mod(py_file):
log.info("generating cffi module %r" % py_file)
mkpath(os.path.dirname(py_file))
updated = recompiler.make_py_source(ffi, module_name, py_file)
if not updated:
log.info("already up-to-date")
base_class = dist.cmdclass.get('build_py', build_py)
class build_py_make_mod(base_class):
def run(self):
base_class.run(self)
module_path = module_name.split('.')
module_path[-1] += '.py'
generate_mod(os.path.join(self.build_lib, *module_path))
def get_source_files(self):
# This is called from 'setup.py sdist' only. Exclude
            # the generated .py module in this case.
saved_py_modules = self.py_modules
try:
if saved_py_modules:
self.py_modules = [m for m in saved_py_modules
if m != module_name]
return base_class.get_source_files(self)
finally:
self.py_modules = saved_py_modules
dist.cmdclass['build_py'] = build_py_make_mod
# distutils and setuptools have no notion I could find of a
# generated python module. If we don't add module_name to
# dist.py_modules, then things mostly work but there are some
# combination of options (--root and --record) that will miss
# the module. So we add it here, which gives a few apparently
# harmless warnings about not finding the file outside the
# build directory.
# Then we need to hack more in get_source_files(); see above.
if dist.py_modules is None:
dist.py_modules = []
dist.py_modules.append(module_name)
# the following is only for "build_ext -i"
base_class_2 = dist.cmdclass.get('build_ext', build_ext)
class build_ext_make_mod(base_class_2):
def run(self):
base_class_2.run(self)
if self.inplace:
# from get_ext_fullpath() in distutils/command/build_ext.py
module_path = module_name.split('.')
package = '.'.join(module_path[:-1])
build_py = self.get_finalized_command('build_py')
package_dir = build_py.get_package_dir(package)
file_name = module_path[-1] + '.py'
generate_mod(os.path.join(package_dir, file_name))
dist.cmdclass['build_ext'] = build_ext_make_mod
def cffi_modules(dist, attr, value):
assert attr == 'cffi_modules'
if isinstance(value, basestring):
value = [value]
for cffi_module in value:
add_cffi_module(dist, cffi_module)

File diff suppressed because it is too large

View File

@@ -0,0 +1,675 @@
#
# DEPRECATED: implementation for ffi.verify()
#
import sys, os
import types
from . import model
from .error import VerificationError
class VGenericEngine(object):
_class_key = 'g'
_gen_python_module = False
def __init__(self, verifier):
self.verifier = verifier
self.ffi = verifier.ffi
self.export_symbols = []
self._struct_pending_verification = {}
def patch_extension_kwds(self, kwds):
# add 'export_symbols' to the dictionary. Note that we add the
# list before filling it. When we fill it, it will thus also show
# up in kwds['export_symbols'].
kwds.setdefault('export_symbols', self.export_symbols)
def find_module(self, module_name, path, so_suffixes):
for so_suffix in so_suffixes:
basename = module_name + so_suffix
if path is None:
path = sys.path
for dirname in path:
filename = os.path.join(dirname, basename)
if os.path.isfile(filename):
return filename
def collect_types(self):
pass # not needed in the generic engine
def _prnt(self, what=''):
self._f.write(what + '\n')
def write_source_to_f(self):
prnt = self._prnt
# first paste some standard set of lines that are mostly '#include'
prnt(cffimod_header)
# then paste the C source given by the user, verbatim.
prnt(self.verifier.preamble)
#
# call generate_gen_xxx_decl(), for every xxx found from
# ffi._parser._declarations. This generates all the functions.
self._generate('decl')
#
# on Windows, distutils insists on putting init_cffi_xyz in
# 'export_symbols', so instead of fighting it, just give up and
# give it one
if sys.platform == 'win32':
if sys.version_info >= (3,):
prefix = 'PyInit_'
else:
prefix = 'init'
modname = self.verifier.get_module_name()
prnt("void %s%s(void) { }\n" % (prefix, modname))
def load_library(self, flags=0):
# import it with the CFFI backend
backend = self.ffi._backend
# needs to make a path that contains '/', on Posix
filename = os.path.join(os.curdir, self.verifier.modulefilename)
module = backend.load_library(filename, flags)
#
# call loading_gen_struct() to get the struct layout inferred by
# the C compiler
self._load(module, 'loading')
# build the FFILibrary class and instance, this is a module subclass
# because modules are expected to have usually-constant-attributes and
# in PyPy this means the JIT is able to treat attributes as constant,
# which we want.
class FFILibrary(types.ModuleType):
_cffi_generic_module = module
_cffi_ffi = self.ffi
_cffi_dir = []
def __dir__(self):
return FFILibrary._cffi_dir
library = FFILibrary("")
#
# finally, call the loaded_gen_xxx() functions. This will set
# up the 'library' object.
self._load(module, 'loaded', library=library)
return library
def _get_declarations(self):
lst = [(key, tp) for (key, (tp, qual)) in
self.ffi._parser._declarations.items()]
lst.sort()
return lst
def _generate(self, step_name):
for name, tp in self._get_declarations():
kind, realname = name.split(' ', 1)
try:
method = getattr(self, '_generate_gen_%s_%s' % (kind,
step_name))
except AttributeError:
raise VerificationError(
"not implemented in verify(): %r" % name)
try:
method(tp, realname)
except Exception as e:
model.attach_exception_info(e, name)
raise
def _load(self, module, step_name, **kwds):
for name, tp in self._get_declarations():
kind, realname = name.split(' ', 1)
method = getattr(self, '_%s_gen_%s' % (step_name, kind))
try:
method(tp, realname, module, **kwds)
except Exception as e:
model.attach_exception_info(e, name)
raise
def _generate_nothing(self, tp, name):
pass
def _loaded_noop(self, tp, name, module, **kwds):
pass
# ----------
# typedefs: generates no code so far
_generate_gen_typedef_decl = _generate_nothing
_loading_gen_typedef = _loaded_noop
_loaded_gen_typedef = _loaded_noop
# ----------
# function declarations
def _generate_gen_function_decl(self, tp, name):
assert isinstance(tp, model.FunctionPtrType)
if tp.ellipsis:
# cannot support vararg functions better than this: check for its
# exact type (including the fixed arguments), and build it as a
# constant function pointer (no _cffi_f_%s wrapper)
self._generate_gen_const(False, name, tp)
return
prnt = self._prnt
numargs = len(tp.args)
argnames = []
for i, type in enumerate(tp.args):
indirection = ''
if isinstance(type, model.StructOrUnion):
indirection = '*'
argnames.append('%sx%d' % (indirection, i))
context = 'argument of %s' % name
arglist = [type.get_c_name(' %s' % arg, context)
for type, arg in zip(tp.args, argnames)]
tpresult = tp.result
if isinstance(tpresult, model.StructOrUnion):
arglist.insert(0, tpresult.get_c_name(' *r', context))
tpresult = model.void_type
arglist = ', '.join(arglist) or 'void'
wrappername = '_cffi_f_%s' % name
self.export_symbols.append(wrappername)
if tp.abi:
abi = tp.abi + ' '
else:
abi = ''
funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
context = 'result of %s' % name
prnt(tpresult.get_c_name(funcdecl, context))
prnt('{')
#
if isinstance(tp.result, model.StructOrUnion):
result_code = '*r = '
elif not isinstance(tp.result, model.VoidType):
result_code = 'return '
else:
result_code = ''
prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
prnt('}')
prnt()
_loading_gen_function = _loaded_noop
def _loaded_gen_function(self, tp, name, module, library):
assert isinstance(tp, model.FunctionPtrType)
if tp.ellipsis:
newfunction = self._load_constant(False, tp, name, module)
else:
indirections = []
base_tp = tp
if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
or isinstance(tp.result, model.StructOrUnion)):
indirect_args = []
for i, typ in enumerate(tp.args):
if isinstance(typ, model.StructOrUnion):
typ = model.PointerType(typ)
indirections.append((i, typ))
indirect_args.append(typ)
indirect_result = tp.result
if isinstance(indirect_result, model.StructOrUnion):
if indirect_result.fldtypes is None:
raise TypeError("'%s' is used as result type, "
"but is opaque" % (
indirect_result._get_c_name(),))
indirect_result = model.PointerType(indirect_result)
indirect_args.insert(0, indirect_result)
indirections.insert(0, ("result", indirect_result))
indirect_result = model.void_type
tp = model.FunctionPtrType(tuple(indirect_args),
indirect_result, tp.ellipsis)
BFunc = self.ffi._get_cached_btype(tp)
wrappername = '_cffi_f_%s' % name
newfunction = module.load_function(BFunc, wrappername)
for i, typ in indirections:
newfunction = self._make_struct_wrapper(newfunction, i, typ,
base_tp)
setattr(library, name, newfunction)
type(library)._cffi_dir.append(name)
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
backend = self.ffi._backend
BType = self.ffi._get_cached_btype(tp)
if i == "result":
ffi = self.ffi
def newfunc(*args):
res = ffi.new(BType)
oldfunc(res, *args)
return res[0]
else:
def newfunc(*args):
args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
return oldfunc(*args)
newfunc._cffi_base_type = base_tp
return newfunc
# ----------
# named structs
def _generate_gen_struct_decl(self, tp, name):
assert name == tp.name
self._generate_struct_or_union_decl(tp, 'struct', name)
def _loading_gen_struct(self, tp, name, module):
self._loading_struct_or_union(tp, 'struct', name, module)
def _loaded_gen_struct(self, tp, name, module, **kwds):
self._loaded_struct_or_union(tp)
def _generate_gen_union_decl(self, tp, name):
assert name == tp.name
self._generate_struct_or_union_decl(tp, 'union', name)
def _loading_gen_union(self, tp, name, module):
self._loading_struct_or_union(tp, 'union', name, module)
def _loaded_gen_union(self, tp, name, module, **kwds):
self._loaded_struct_or_union(tp)
def _generate_struct_or_union_decl(self, tp, prefix, name):
if tp.fldnames is None:
return # nothing to do with opaque structs
checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
cname = ('%s %s' % (prefix, name)).strip()
#
prnt = self._prnt
prnt('static void %s(%s *p)' % (checkfuncname, cname))
prnt('{')
prnt(' /* only to generate compile-time warnings or errors */')
prnt(' (void)p;')
for fname, ftype, fbitsize, fqual in tp.enumfields():
if (isinstance(ftype, model.PrimitiveType)
and ftype.is_integer_type()) or fbitsize >= 0:
# accept all integers, but complain on float or double
prnt(' (void)((p->%s) << 1);' % fname)
else:
# only accept exactly the type declared.
try:
prnt(' { %s = &p->%s; (void)tmp; }' % (
ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
fname))
except VerificationError as e:
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
prnt('}')
self.export_symbols.append(layoutfuncname)
prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
prnt('{')
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
prnt(' static intptr_t nums[] = {')
prnt(' sizeof(%s),' % cname)
prnt(' offsetof(struct _cffi_aligncheck, y),')
for fname, ftype, fbitsize, fqual in tp.enumfields():
if fbitsize >= 0:
continue # xxx ignore fbitsize for now
prnt(' offsetof(%s, %s),' % (cname, fname))
if isinstance(ftype, model.ArrayType) and ftype.length is None:
prnt(' 0, /* %s */' % ftype._get_c_name())
else:
prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
prnt(' -1')
prnt(' };')
prnt(' return nums[i];')
prnt(' /* the next line is not executed, but compiled */')
prnt(' %s(0);' % (checkfuncname,))
prnt('}')
prnt()
def _loading_struct_or_union(self, tp, prefix, name, module):
if tp.fldnames is None:
return # nothing to do with opaque structs
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
#
BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
function = module.load_function(BFunc, layoutfuncname)
layout = []
num = 0
while True:
x = function(num)
if x < 0: break
layout.append(x)
num += 1
if isinstance(tp, model.StructOrUnion) and tp.partial:
# use the function()'s sizes and offsets to guide the
# layout of the struct
totalsize = layout[0]
totalalignment = layout[1]
fieldofs = layout[2::2]
fieldsize = layout[3::2]
tp.force_flatten()
assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
else:
cname = ('%s %s' % (prefix, name)).strip()
self._struct_pending_verification[tp] = layout, cname
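    # Worked illustration in comments, not engine code: for a hypothetical
    # struct with two fields, the generated layout function returns
    # 16, 8, 0, 4, 8, 8 and finally -1 as the terminator, so the Python side
    # collects layout = [16, 8, 0, 4, 8, 8].  The slices above then give
    # totalsize = 16, totalalignment = 8, fieldofs = layout[2::2] = [0, 8]
    # and fieldsize = layout[3::2] = [4, 8].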
def _loaded_struct_or_union(self, tp):
if tp.fldnames is None:
return # nothing to do with opaque structs
self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
if tp in self._struct_pending_verification:
# check that the layout sizes and offsets match the real ones
def check(realvalue, expectedvalue, msg):
if realvalue != expectedvalue:
raise VerificationError(
"%s (we have %d, but C compiler says %d)"
% (msg, expectedvalue, realvalue))
ffi = self.ffi
BStruct = ffi._get_cached_btype(tp)
layout, cname = self._struct_pending_verification.pop(tp)
check(layout[0], ffi.sizeof(BStruct), "wrong total size")
check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
i = 2
for fname, ftype, fbitsize, fqual in tp.enumfields():
if fbitsize >= 0:
continue # xxx ignore fbitsize for now
check(layout[i], ffi.offsetof(BStruct, fname),
"wrong offset for field %r" % (fname,))
if layout[i+1] != 0:
BField = ffi._get_cached_btype(ftype)
check(layout[i+1], ffi.sizeof(BField),
"wrong size for field %r" % (fname,))
i += 2
assert i == len(layout)
# ----------
# 'anonymous' declarations. These are produced for anonymous structs
# or unions; the 'name' is obtained by a typedef.
def _generate_gen_anonymous_decl(self, tp, name):
if isinstance(tp, model.EnumType):
self._generate_gen_enum_decl(tp, name, '')
else:
self._generate_struct_or_union_decl(tp, '', name)
def _loading_gen_anonymous(self, tp, name, module):
if isinstance(tp, model.EnumType):
self._loading_gen_enum(tp, name, module, '')
else:
self._loading_struct_or_union(tp, '', name, module)
def _loaded_gen_anonymous(self, tp, name, module, **kwds):
if isinstance(tp, model.EnumType):
self._loaded_gen_enum(tp, name, module, **kwds)
else:
self._loaded_struct_or_union(tp)
# ----------
# constants, likely declared with '#define'
def _generate_gen_const(self, is_int, name, tp=None, category='const',
check_value=None):
prnt = self._prnt
funcname = '_cffi_%s_%s' % (category, name)
self.export_symbols.append(funcname)
if check_value is not None:
assert is_int
assert category == 'const'
prnt('int %s(char *out_error)' % funcname)
prnt('{')
self._check_int_constant_value(name, check_value)
prnt(' return 0;')
prnt('}')
elif is_int:
assert category == 'const'
prnt('int %s(long long *out_value)' % funcname)
prnt('{')
prnt(' *out_value = (long long)(%s);' % (name,))
prnt(' return (%s) <= 0;' % (name,))
prnt('}')
else:
assert tp is not None
assert check_value is None
if category == 'var':
ampersand = '&'
else:
ampersand = ''
extra = ''
if category == 'const' and isinstance(tp, model.StructOrUnion):
extra = 'const *'
ampersand = '&'
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
prnt('{')
prnt(' return (%s%s);' % (ampersand, name))
prnt('}')
prnt()
def _generate_gen_constant_decl(self, tp, name):
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
self._generate_gen_const(is_int, name, tp)
_loading_gen_constant = _loaded_noop
def _load_constant(self, is_int, tp, name, module, check_value=None):
funcname = '_cffi_const_%s' % name
if check_value is not None:
assert is_int
self._load_known_int_constant(module, funcname)
value = check_value
elif is_int:
BType = self.ffi._typeof_locked("long long*")[0]
BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
function = module.load_function(BFunc, funcname)
p = self.ffi.new(BType)
negative = function(p)
value = int(p[0])
if value < 0 and not negative:
BLongLong = self.ffi._typeof_locked("long long")[0]
value += (1 << (8*self.ffi.sizeof(BLongLong)))
else:
assert check_value is None
fntypeextra = '(*)(void)'
if isinstance(tp, model.StructOrUnion):
fntypeextra = '*' + fntypeextra
BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
function = module.load_function(BFunc, funcname)
value = function()
if isinstance(tp, model.StructOrUnion):
value = value[0]
return value
def _loaded_gen_constant(self, tp, name, module, library):
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
value = self._load_constant(is_int, tp, name, module)
setattr(library, name, value)
type(library)._cffi_dir.append(name)
# ----------
# enums
def _check_int_constant_value(self, name, value):
prnt = self._prnt
if value <= 0:
prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
name, name, value))
else:
prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
name, name, value))
prnt(' char buf[64];')
prnt(' if ((%s) <= 0)' % name)
prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
prnt(' else')
prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
name)
prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
prnt(' "%s", buf, "%d");' % (name[:100], value))
prnt(' return -1;')
prnt(' }')
def _load_known_int_constant(self, module, funcname):
BType = self.ffi._typeof_locked("char[]")[0]
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
function = module.load_function(BFunc, funcname)
p = self.ffi.new(BType, 256)
if function(p) < 0:
error = self.ffi.string(p)
if sys.version_info >= (3,):
error = str(error, 'utf-8')
raise VerificationError(error)
def _enum_funcname(self, prefix, name):
# "$enum_$1" => "___D_enum____D_1"
name = name.replace('$', '___D_')
return '_cffi_e_%s_%s' % (prefix, name)
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
if tp.partial:
for enumerator in tp.enumerators:
self._generate_gen_const(True, enumerator)
return
#
funcname = self._enum_funcname(prefix, name)
self.export_symbols.append(funcname)
prnt = self._prnt
prnt('int %s(char *out_error)' % funcname)
prnt('{')
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
self._check_int_constant_value(enumerator, enumvalue)
prnt(' return 0;')
prnt('}')
prnt()
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
if tp.partial:
enumvalues = [self._load_constant(True, tp, enumerator, module)
for enumerator in tp.enumerators]
tp.enumvalues = tuple(enumvalues)
tp.partial_resolved = True
else:
funcname = self._enum_funcname(prefix, name)
self._load_known_int_constant(module, funcname)
def _loaded_gen_enum(self, tp, name, module, library):
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
setattr(library, enumerator, enumvalue)
type(library)._cffi_dir.append(enumerator)
# ----------
# macros: for now only for integers
def _generate_gen_macro_decl(self, tp, name):
if tp == '...':
check_value = None
else:
check_value = tp # an integer
self._generate_gen_const(True, name, check_value=check_value)
_loading_gen_macro = _loaded_noop
def _loaded_gen_macro(self, tp, name, module, library):
if tp == '...':
check_value = None
else:
check_value = tp # an integer
value = self._load_constant(True, tp, name, module,
check_value=check_value)
setattr(library, name, value)
type(library)._cffi_dir.append(name)
# ----------
# global variables
def _generate_gen_variable_decl(self, tp, name):
if isinstance(tp, model.ArrayType):
if tp.length_is_unknown():
prnt = self._prnt
funcname = '_cffi_sizeof_%s' % (name,)
self.export_symbols.append(funcname)
prnt("size_t %s(void)" % funcname)
prnt("{")
prnt(" return sizeof(%s);" % (name,))
prnt("}")
tp_ptr = model.PointerType(tp.item)
self._generate_gen_const(False, name, tp_ptr)
else:
tp_ptr = model.PointerType(tp)
self._generate_gen_const(False, name, tp_ptr, category='var')
_loading_gen_variable = _loaded_noop
def _loaded_gen_variable(self, tp, name, module, library):
if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
# sense that "a=..." is forbidden
if tp.length_is_unknown():
funcname = '_cffi_sizeof_%s' % (name,)
BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
function = module.load_function(BFunc, funcname)
size = function()
BItemType = self.ffi._get_cached_btype(tp.item)
length, rest = divmod(size, self.ffi.sizeof(BItemType))
if rest != 0:
raise VerificationError(
"bad size: %r does not seem to be an array of %s" %
(name, tp.item))
tp = tp.resolve_length(length)
tp_ptr = model.PointerType(tp.item)
value = self._load_constant(False, tp_ptr, name, module)
# 'value' is a <cdata 'type *'> which we have to replace with
# a <cdata 'type[N]'> if the N is actually known
if tp.length is not None:
BArray = self.ffi._get_cached_btype(tp)
value = self.ffi.cast(BArray, value)
setattr(library, name, value)
type(library)._cffi_dir.append(name)
return
# remove ptr=<cdata 'int *'> from the library instance, and replace
# it by a property on the class, which reads/writes into ptr[0].
funcname = '_cffi_var_%s' % name
BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
function = module.load_function(BFunc, funcname)
ptr = function()
def getter(library):
return ptr[0]
def setter(library, value):
ptr[0] = value
setattr(type(library), name, property(getter, setter))
type(library)._cffi_dir.append(name)
cffimod_header = r'''
#include <stdio.h>
#include <stddef.h>
#include <stdarg.h>
#include <errno.h>
#include <sys/types.h> /* XXX for ssize_t on some platforms */
/* this block of #ifs should be kept exactly identical between
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
and cffi/_cffi_include.h */
#if defined(_MSC_VER)
# include <malloc.h> /* for alloca() */
# if _MSC_VER < 1600 /* MSVC < 2010 */
typedef __int8 int8_t;
typedef __int16 int16_t;
typedef __int32 int32_t;
typedef __int64 int64_t;
typedef unsigned __int8 uint8_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int64 uint64_t;
typedef __int8 int_least8_t;
typedef __int16 int_least16_t;
typedef __int32 int_least32_t;
typedef __int64 int_least64_t;
typedef unsigned __int8 uint_least8_t;
typedef unsigned __int16 uint_least16_t;
typedef unsigned __int32 uint_least32_t;
typedef unsigned __int64 uint_least64_t;
typedef __int8 int_fast8_t;
typedef __int16 int_fast16_t;
typedef __int32 int_fast32_t;
typedef __int64 int_fast64_t;
typedef unsigned __int8 uint_fast8_t;
typedef unsigned __int16 uint_fast16_t;
typedef unsigned __int32 uint_fast32_t;
typedef unsigned __int64 uint_fast64_t;
typedef __int64 intmax_t;
typedef unsigned __int64 uintmax_t;
# else
# include <stdint.h>
# endif
# if _MSC_VER < 1800 /* MSVC < 2013 */
# ifndef __cplusplus
typedef unsigned char _Bool;
# endif
# endif
#else
# include <stdint.h>
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
# include <alloca.h>
# endif
#endif
'''

View File

@@ -0,0 +1,306 @@
#
# DEPRECATED: implementation for ffi.verify()
#
import sys, os, binascii, shutil, io
from . import __version_verifier_modules__
from . import ffiplatform
from .error import VerificationError
if sys.version_info >= (3, 3):
import importlib.machinery
def _extension_suffixes():
return importlib.machinery.EXTENSION_SUFFIXES[:]
else:
import imp
def _extension_suffixes():
return [suffix for suffix, _, type in imp.get_suffixes()
if type == imp.C_EXTENSION]
if sys.version_info >= (3,):
NativeIO = io.StringIO
else:
class NativeIO(io.BytesIO):
def write(self, s):
if isinstance(s, unicode):
s = s.encode('ascii')
super(NativeIO, self).write(s)
class Verifier(object):
def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
ext_package=None, tag='', force_generic_engine=False,
source_extension='.c', flags=None, relative_to=None, **kwds):
if ffi._parser._uses_new_feature:
raise VerificationError(
"feature not supported with ffi.verify(), but only "
"with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
self.ffi = ffi
self.preamble = preamble
if not modulename:
flattened_kwds = ffiplatform.flatten(kwds)
vengine_class = _locate_engine_class(ffi, force_generic_engine)
self._vengine = vengine_class(self)
self._vengine.patch_extension_kwds(kwds)
self.flags = flags
self.kwds = self.make_relative_to(kwds, relative_to)
#
if modulename:
if tag:
raise TypeError("can't specify both 'modulename' and 'tag'")
else:
key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
preamble, flattened_kwds] +
ffi._cdefsources)
if sys.version_info >= (3,):
key = key.encode('utf-8')
k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
k2 = k2.lstrip('0').rstrip('L')
modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
k1, k2)
suffix = _get_so_suffixes()[0]
self.tmpdir = tmpdir or _caller_dir_pycache()
self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
self.ext_package = ext_package
self._has_source = False
self._has_module = False
def write_source(self, file=None):
"""Write the C source code. It is produced in 'self.sourcefilename',
which can be tweaked beforehand."""
with self.ffi._lock:
if self._has_source and file is None:
raise VerificationError(
"source code already written")
self._write_source(file)
def compile_module(self):
"""Write the C source code (if not done already) and compile it.
This produces a dynamic link library in 'self.modulefilename'."""
with self.ffi._lock:
if self._has_module:
raise VerificationError("module already compiled")
if not self._has_source:
self._write_source()
self._compile_module()
def load_library(self):
"""Get a C module from this Verifier instance.
Returns an instance of a FFILibrary class that behaves like the
objects returned by ffi.dlopen(), but that delegates all
operations to the C module. If necessary, the C code is written
and compiled first.
"""
with self.ffi._lock:
if not self._has_module:
self._locate_module()
if not self._has_module:
if not self._has_source:
self._write_source()
self._compile_module()
return self._load_library()
def get_module_name(self):
basename = os.path.basename(self.modulefilename)
# kill both the .so extension and the other .'s, as introduced
# by Python 3: 'basename.cpython-33m.so'
basename = basename.split('.', 1)[0]
# and the _d added in Python 2 debug builds --- but try to be
# conservative and not kill a legitimate _d
if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
basename = basename[:-2]
return basename
def get_extension(self):
ffiplatform._hack_at_distutils() # backward compatibility hack
if not self._has_source:
with self.ffi._lock:
if not self._has_source:
self._write_source()
sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
modname = self.get_module_name()
return ffiplatform.get_extension(sourcename, modname, **self.kwds)
def generates_python_module(self):
return self._vengine._gen_python_module
def make_relative_to(self, kwds, relative_to):
if relative_to and os.path.dirname(relative_to):
dirname = os.path.dirname(relative_to)
kwds = kwds.copy()
for key in ffiplatform.LIST_OF_FILE_NAMES:
if key in kwds:
lst = kwds[key]
if not isinstance(lst, (list, tuple)):
raise TypeError("keyword '%s' should be a list or tuple"
% (key,))
lst = [os.path.join(dirname, fn) for fn in lst]
kwds[key] = lst
return kwds
# ----------
def _locate_module(self):
if not os.path.isfile(self.modulefilename):
if self.ext_package:
try:
pkg = __import__(self.ext_package, None, None, ['__doc__'])
except ImportError:
return # cannot import the package itself, give up
# (e.g. it might be called differently before installation)
path = pkg.__path__
else:
path = None
filename = self._vengine.find_module(self.get_module_name(), path,
_get_so_suffixes())
if filename is None:
return
self.modulefilename = filename
self._vengine.collect_types()
self._has_module = True
def _write_source_to(self, file):
self._vengine._f = file
try:
self._vengine.write_source_to_f()
finally:
del self._vengine._f
def _write_source(self, file=None):
if file is not None:
self._write_source_to(file)
else:
# Write our source file to an in memory file.
f = NativeIO()
self._write_source_to(f)
source_data = f.getvalue()
# Determine if this matches the current file
if os.path.exists(self.sourcefilename):
with open(self.sourcefilename, "r") as fp:
needs_written = not (fp.read() == source_data)
else:
needs_written = True
# Actually write the file out if it doesn't match
if needs_written:
_ensure_dir(self.sourcefilename)
with open(self.sourcefilename, "w") as fp:
fp.write(source_data)
# Set this flag
self._has_source = True
def _compile_module(self):
# compile this C source
tmpdir = os.path.dirname(self.sourcefilename)
outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
try:
same = ffiplatform.samefile(outputfilename, self.modulefilename)
except OSError:
same = False
if not same:
_ensure_dir(self.modulefilename)
shutil.move(outputfilename, self.modulefilename)
self._has_module = True
def _load_library(self):
assert self._has_module
if self.flags is not None:
return self._vengine.load_library(self.flags)
else:
return self._vengine.load_library()
# ____________________________________________________________
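# Illustrative sketch, not part of this module: the deprecated ffi.verify()
# call that drives the Verifier class above, assuming a Unix toolchain with
# libm available.
import cffi
_sketch_ffi = cffi.FFI()
_sketch_ffi.cdef("double sqrt(double x);")
_sketch_lib = _sketch_ffi.verify("#include <math.h>", libraries=["m"])
assert _sketch_lib.sqrt(4.0) == 2.0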
_FORCE_GENERIC_ENGINE = False # for tests
def _locate_engine_class(ffi, force_generic_engine):
if _FORCE_GENERIC_ENGINE:
force_generic_engine = True
if not force_generic_engine:
if '__pypy__' in sys.builtin_module_names:
force_generic_engine = True
else:
try:
import _cffi_backend
except ImportError:
_cffi_backend = '?'
if ffi._backend is not _cffi_backend:
force_generic_engine = True
if force_generic_engine:
from . import vengine_gen
return vengine_gen.VGenericEngine
else:
from . import vengine_cpy
return vengine_cpy.VCPythonEngine
# ____________________________________________________________
_TMPDIR = None
def _caller_dir_pycache():
if _TMPDIR:
return _TMPDIR
result = os.environ.get('CFFI_TMPDIR')
if result:
return result
filename = sys._getframe(2).f_code.co_filename
return os.path.abspath(os.path.join(os.path.dirname(filename),
'__pycache__'))
def set_tmpdir(dirname):
"""Set the temporary directory to use instead of __pycache__."""
global _TMPDIR
_TMPDIR = dirname
def cleanup_tmpdir(tmpdir=None, keep_so=False):
"""Clean up the temporary directory by removing all files in it
called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
tmpdir = tmpdir or _caller_dir_pycache()
try:
filelist = os.listdir(tmpdir)
except OSError:
return
if keep_so:
suffix = '.c' # only remove .c files
else:
suffix = _get_so_suffixes()[0].lower()
for fn in filelist:
if fn.lower().startswith('_cffi_') and (
fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
try:
os.unlink(os.path.join(tmpdir, fn))
except OSError:
pass
clean_dir = [os.path.join(tmpdir, 'build')]
for dir in clean_dir:
try:
for fn in os.listdir(dir):
fn = os.path.join(dir, fn)
if os.path.isdir(fn):
clean_dir.append(fn)
else:
os.unlink(fn)
except OSError:
pass
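# Illustrative sketch, not part of this module: pointing the verifier at a
# scratch directory and cleaning it up afterwards.  '/tmp/cffi-scratch' is a
# made-up path.
set_tmpdir("/tmp/cffi-scratch")
# ... ffi.verify() calls now write and compile into that directory ...
cleanup_tmpdir(keep_so=True)    # drop the generated _cffi_*.c files, keep the compiled modules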
def _get_so_suffixes():
suffixes = _extension_suffixes()
if not suffixes:
# bah, no C_EXTENSION available. Occurs on pypy without cpyext
if sys.platform == 'win32':
suffixes = [".pyd"]
else:
suffixes = [".so"]
return suffixes
def _ensure_dir(filename):
dirname = os.path.dirname(filename)
if dirname and not os.path.isdir(dirname):
os.makedirs(dirname)

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,504 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

View File

@@ -0,0 +1,101 @@
Metadata-Version: 2.1
Name: chardet
Version: 4.0.0
Summary: Universal encoding detector for Python 2 and 3
Home-page: https://github.com/chardet/chardet
Author: Mark Pilgrim
Author-email: mark@diveintomark.org
Maintainer: Daniel Blanchard
Maintainer-email: dan.blanchard@gmail.com
License: LGPL
Keywords: encoding,i18n,xml
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Linguistic
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Chardet: The Universal Character Encoding Detector
--------------------------------------------------
.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg
:alt: Build status
:target: https://travis-ci.org/chardet/chardet
.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg
:target: https://coveralls.io/r/chardet/chardet
.. image:: https://img.shields.io/pypi/v/chardet.svg
:target: https://warehouse.python.org/project/chardet/
:alt: Latest version on PyPI
.. image:: https://img.shields.io/pypi/l/chardet.svg
:alt: License
Detects
- ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)
- Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)
- EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)
- EUC-KR, ISO-2022-KR (Korean)
- KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)
- ISO-8859-5, windows-1251 (Bulgarian)
- ISO-8859-1, windows-1252 (Western European languages)
- ISO-8859-7, windows-1253 (Greek)
- ISO-8859-8, windows-1255 (Visual and Logical Hebrew)
- TIS-620 (Thai)
.. note::
Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily
disabled until we can retrain the models.
Requires Python 2.7 or 3.5+.
Installation
------------
Install from `PyPI <https://pypi.org/project/chardet/>`_::
pip install chardet
Documentation
-------------
For users, docs are now available at https://chardet.readthedocs.io/.
Command-line Tool
-----------------
chardet comes with a command-line script which reports on the encodings of one
or more files::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
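The same detection is available from Python through the top-level ``detect()``
function; a minimal sketch (``somefile`` is the illustrative name used above,
and the result keys are chardet's documented fields)::

    import chardet

    # Feed detect() the raw bytes of a file and read the verdict back out.
    with open("somefile", "rb") as fp:
        result = chardet.detect(fp.read())
    # result is a dict such as {'encoding': 'windows-1252', 'confidence': 0.5, 'language': ''}
    print(result["encoding"], result["confidence"])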
About
-----
This is a continuation of Mark Pilgrim's excellent chardet. Previously, two
versions needed to be maintained: one that supported python 2.x and one that
supported python 3.x. We've recently merged with `Ian Cordasco <https://github.com/sigmavirus24>`_'s
`charade <https://github.com/sigmavirus24/charade>`_ fork, so now we have one
coherent version that works for Python 2.7+ and 3.4+.
:maintainer: Dan Blanchard

View File

@@ -0,0 +1,95 @@
../../Scripts/chardetect.exe,sha256=ClgAeD3Mn81aLRmPGMoGgR5a3feCTCoxVtc2j3vNol4,106393
chardet-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
chardet-4.0.0.dist-info/LICENSE,sha256=YJXp_6d33SKDn3gBqoRbMcntB_PWv4om3F0t7IzMDvM,26432
chardet-4.0.0.dist-info/METADATA,sha256=ySYQAE7NPm3LwxgMqFi1zdLQ48mmwMbrJwqAWCtcbH8,3526
chardet-4.0.0.dist-info/RECORD,,
chardet-4.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
chardet-4.0.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
chardet-4.0.0.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60
chardet-4.0.0.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8
chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271
chardet/__pycache__/__init__.cpython-36.pyc,,
chardet/__pycache__/big5freq.cpython-36.pyc,,
chardet/__pycache__/big5prober.cpython-36.pyc,,
chardet/__pycache__/chardistribution.cpython-36.pyc,,
chardet/__pycache__/charsetgroupprober.cpython-36.pyc,,
chardet/__pycache__/charsetprober.cpython-36.pyc,,
chardet/__pycache__/codingstatemachine.cpython-36.pyc,,
chardet/__pycache__/compat.cpython-36.pyc,,
chardet/__pycache__/cp949prober.cpython-36.pyc,,
chardet/__pycache__/enums.cpython-36.pyc,,
chardet/__pycache__/escprober.cpython-36.pyc,,
chardet/__pycache__/escsm.cpython-36.pyc,,
chardet/__pycache__/eucjpprober.cpython-36.pyc,,
chardet/__pycache__/euckrfreq.cpython-36.pyc,,
chardet/__pycache__/euckrprober.cpython-36.pyc,,
chardet/__pycache__/euctwfreq.cpython-36.pyc,,
chardet/__pycache__/euctwprober.cpython-36.pyc,,
chardet/__pycache__/gb2312freq.cpython-36.pyc,,
chardet/__pycache__/gb2312prober.cpython-36.pyc,,
chardet/__pycache__/hebrewprober.cpython-36.pyc,,
chardet/__pycache__/jisfreq.cpython-36.pyc,,
chardet/__pycache__/jpcntx.cpython-36.pyc,,
chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,,
chardet/__pycache__/langgreekmodel.cpython-36.pyc,,
chardet/__pycache__/langhebrewmodel.cpython-36.pyc,,
chardet/__pycache__/langhungarianmodel.cpython-36.pyc,,
chardet/__pycache__/langrussianmodel.cpython-36.pyc,,
chardet/__pycache__/langthaimodel.cpython-36.pyc,,
chardet/__pycache__/langturkishmodel.cpython-36.pyc,,
chardet/__pycache__/latin1prober.cpython-36.pyc,,
chardet/__pycache__/mbcharsetprober.cpython-36.pyc,,
chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,,
chardet/__pycache__/mbcssm.cpython-36.pyc,,
chardet/__pycache__/sbcharsetprober.cpython-36.pyc,,
chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,,
chardet/__pycache__/sjisprober.cpython-36.pyc,,
chardet/__pycache__/universaldetector.cpython-36.pyc,,
chardet/__pycache__/utf8prober.cpython-36.pyc,,
chardet/__pycache__/version.cpython-36.pyc,,
chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839
chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
chardet/cli/__pycache__/__init__.cpython-36.pyc,,
chardet/cli/__pycache__/chardetect.cpython-36.pyc,,
chardet/cli/chardetect.py,sha256=kUPeQCi-olObXpOq5MtlKuBn1EU19rkeenAMwxl7URY,2711
chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200
chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
chardet/langbulgarianmodel.py,sha256=r6tvOtO8FqhnbWBB5V4czcl1fWM4pB9lGiWQU-8gvsw,105685
chardet/langgreekmodel.py,sha256=1cMu2wUgPB8bQ2RbVjR4LNwCCETgQ-Dwo0Eg2_uB11s,99559
chardet/langhebrewmodel.py,sha256=urMmJHHIXtCwaWAqy1zEY_4SmwwNzt730bDOtjXzRjs,98764
chardet/langhungarianmodel.py,sha256=ODAisvqCfes8B4FeyM_Pg9HY3ZDnEyaCiT4Bxyzoc6w,102486
chardet/langrussianmodel.py,sha256=sPqkrBbX0QVwwy6oqRl-x7ERv2J4-zaMoCvLpkSsSJI,131168
chardet/langthaimodel.py,sha256=ppoKOGL9OPdj9A4CUyG8R48zbnXt9MN1WXeCYepa6sc,103300
chardet/langturkishmodel.py,sha256=H3ldicI_rhlv0r3VFpVWtUL6X30Wy596v7_YHz2sEdg,95934
chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
chardet/metadata/__pycache__/__init__.cpython-36.pyc,,
chardet/metadata/__pycache__/languages.cpython-36.pyc,,
chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474
chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136
chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309
chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503
chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242

View File

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

View File

@@ -0,0 +1,3 @@
[console_scripts]
chardetect = chardet.cli.chardetect:main

View File

@@ -0,0 +1 @@
chardet

File diff suppressed because it is too large

View File

@@ -0,0 +1,310 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Metadata about languages used by our model training code for our
SingleByteCharSetProbers. Could be used for other things in the future.
This code is based on the language metadata from the uchardet project.
"""
from __future__ import absolute_import, print_function
from string import ascii_letters
# TODO: Add Ukranian (KOI8-U)
class Language(object):
"""Metadata about a language useful for training models
:ivar name: The human name for the language, in English.
:type name: str
:ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise,
or use another catalog as a last resort.
:type iso_code: str
:ivar use_ascii: Whether or not ASCII letters should be included in trained
models.
:type use_ascii: bool
:ivar charsets: The charsets we want to support and create data for.
:type charsets: list of str
:ivar alphabet: The characters in the language's alphabet. If `use_ascii` is
`True`, you only need to add those not in the ASCII set.
:type alphabet: str
:ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling
Wikipedia for training data.
:type wiki_start_pages: list of str
"""
def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None,
alphabet=None, wiki_start_pages=None):
super(Language, self).__init__()
self.name = name
self.iso_code = iso_code
self.use_ascii = use_ascii
self.charsets = charsets
if self.use_ascii:
if alphabet:
alphabet += ascii_letters
else:
alphabet = ascii_letters
elif not alphabet:
raise ValueError('Must supply alphabet if use_ascii is False')
self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None
self.wiki_start_pages = wiki_start_pages
def __repr__(self):
return '{}({})'.format(self.__class__.__name__,
', '.join('{}={!r}'.format(k, v)
for k, v in self.__dict__.items()
if not k.startswith('_')))
LANGUAGES = {'Arabic': Language(name='Arabic',
iso_code='ar',
use_ascii=False,
# We only support encodings that use isolated
# forms, because the current recommendation is
# that the rendering system handles presentation
# forms. This means we purposefully skip IBM864.
charsets=['ISO-8859-6', 'WINDOWS-1256',
'CP720', 'CP864'],
alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ',
wiki_start_pages=[u'الصفحة_الرئيسية']),
'Belarusian': Language(name='Belarusian',
iso_code='be',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'IBM866', 'MacCyrillic'],
alphabet=(u'АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯ'
u'абвгдеёжзійклмнопрстуўфхцчшыьэюяʼ'),
wiki_start_pages=[u'Галоўная_старонка']),
'Bulgarian': Language(name='Bulgarian',
iso_code='bg',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'IBM855'],
alphabet=(u'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯ'
u'абвгдежзийклмнопрстуфхцчшщъьюя'),
wiki_start_pages=[u'Начална_страница']),
'Czech': Language(name='Czech',
iso_code='cz',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ',
wiki_start_pages=[u'Hlavní_strana']),
'Danish': Language(name='Danish',
iso_code='da',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'æøåÆØÅ',
wiki_start_pages=[u'Forside']),
'German': Language(name='German',
iso_code='de',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
alphabet=u'äöüßÄÖÜ',
wiki_start_pages=[u'Wikipedia:Hauptseite']),
'Greek': Language(name='Greek',
iso_code='el',
use_ascii=False,
charsets=['ISO-8859-7', 'WINDOWS-1253'],
alphabet=(u'αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώ'
u'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ'),
wiki_start_pages=[u'Πύλη:Κύρια']),
'English': Language(name='English',
iso_code='en',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
wiki_start_pages=[u'Main_Page']),
'Esperanto': Language(name='Esperanto',
iso_code='eo',
# Q, W, X, and Y not used at all
use_ascii=False,
charsets=['ISO-8859-3'],
alphabet=(u'abcĉdefgĝhĥijĵklmnoprsŝtuŭvz'
u'ABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ'),
wiki_start_pages=[u'Vikipedio:Ĉefpaĝo']),
'Spanish': Language(name='Spanish',
iso_code='es',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ñáéíóúüÑÁÉÍÓÚÜ',
wiki_start_pages=[u'Wikipedia:Portada']),
'Estonian': Language(name='Estonian',
iso_code='et',
use_ascii=False,
charsets=['ISO-8859-4', 'ISO-8859-13',
'WINDOWS-1257'],
# C, F, Š, Q, W, X, Y, Z, Ž are only for
# loanwords
alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ'
u'abdeghijklmnoprstuvõäöü'),
wiki_start_pages=[u'Esileht']),
'Finnish': Language(name='Finnish',
iso_code='fi',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÅÄÖŠŽåäöšž',
wiki_start_pages=[u'Wikipedia:Etusivu']),
'French': Language(name='French',
iso_code='fr',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ',
wiki_start_pages=[u'Wikipédia:Accueil_principal',
u'Bœuf (animal)']),
'Hebrew': Language(name='Hebrew',
iso_code='he',
use_ascii=False,
charsets=['ISO-8859-8', 'WINDOWS-1255'],
alphabet=u'אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ',
wiki_start_pages=[u'עמוד_ראשי']),
'Croatian': Language(name='Croatian',
iso_code='hr',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcčćdđefghijklmnoprsštuvzž'
u'ABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ'),
wiki_start_pages=[u'Glavna_stranica']),
'Hungarian': Language(name='Hungarian',
iso_code='hu',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű'
u'ABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ'),
wiki_start_pages=[u'Kezdőlap']),
'Italian': Language(name='Italian',
iso_code='it',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÀÈÉÌÒÓÙàèéìòóù',
wiki_start_pages=[u'Pagina_principale']),
'Lithuanian': Language(name='Lithuanian',
iso_code='lt',
use_ascii=False,
charsets=['ISO-8859-13', 'WINDOWS-1257',
'ISO-8859-4'],
# Q, W, and X not used at all
alphabet=(u'AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽ'
u'aąbcčdeęėfghiįyjklmnoprsštuųūvzž'),
wiki_start_pages=[u'Pagrindinis_puslapis']),
'Latvian': Language(name='Latvian',
iso_code='lv',
use_ascii=False,
charsets=['ISO-8859-13', 'WINDOWS-1257',
'ISO-8859-4'],
# Q, W, X, Y are only for loanwords
alphabet=(u'AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽ'
u'aābcčdeēfgģhiījkķlļmnņoprsštuūvzž'),
wiki_start_pages=[u'Sākumlapa']),
'Macedonian': Language(name='Macedonian',
iso_code='mk',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'MacCyrillic', 'IBM855'],
alphabet=(u'АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШ'
u'абвгдѓежзѕијклљмнњопрстќуфхцчџш'),
wiki_start_pages=[u'Главна_страница']),
'Dutch': Language(name='Dutch',
iso_code='nl',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
wiki_start_pages=[u'Hoofdpagina']),
'Polish': Language(name='Polish',
iso_code='pl',
# Q and X are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻ'
u'aąbcćdeęfghijklłmnńoóprsśtuwyzźż'),
wiki_start_pages=[u'Wikipedia:Strona_główna']),
'Portuguese': Language(name='Portuguese',
iso_code='pt',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú',
wiki_start_pages=[u'Wikipédia:Página_principal']),
'Romanian': Language(name='Romanian',
iso_code='ro',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'ăâîșțĂÂÎȘȚ',
wiki_start_pages=[u'Pagina_principală']),
'Russian': Language(name='Russian',
iso_code='ru',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'KOI8-R', 'MacCyrillic', 'IBM866',
'IBM855'],
alphabet=(u'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
u'АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'),
wiki_start_pages=[u'Заглавная_страница']),
'Slovak': Language(name='Slovak',
iso_code='sk',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ',
wiki_start_pages=[u'Hlavná_stránka']),
'Slovene': Language(name='Slovene',
iso_code='sl',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcčdefghijklmnoprsštuvzž'
u'ABCČDEFGHIJKLMNOPRSŠTUVZŽ'),
wiki_start_pages=[u'Glavna_stran']),
# Serbian can be written in both Latin and Cyrillic, but there's no
# simple way to get the Latin alphabet pages from Wikipedia through
# the API, so for now we just support Cyrillic.
'Serbian': Language(name='Serbian',
iso_code='sr',
alphabet=(u'АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШ'
u'абвгдђежзијклљмнњопрстћуфхцчџш'),
charsets=['ISO-8859-5', 'WINDOWS-1251',
'MacCyrillic', 'IBM855'],
wiki_start_pages=[u'Главна_страна']),
'Thai': Language(name='Thai',
iso_code='th',
use_ascii=False,
charsets=['ISO-8859-11', 'TIS-620', 'CP874'],
alphabet=u'กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛',
wiki_start_pages=[u'หน้าหลัก']),
'Turkish': Language(name='Turkish',
iso_code='tr',
# Q, W, and X are not used by Turkish
use_ascii=False,
charsets=['ISO-8859-3', 'ISO-8859-9',
'WINDOWS-1254'],
alphabet=(u'abcçdefgğhıijklmnoöprsştuüvyzâîû'
u'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ'),
wiki_start_pages=[u'Ana_Sayfa']),
'Vietnamese': Language(name='Vietnamese',
iso_code='vi',
use_ascii=False,
# Windows-1258 is the only common 8-bit
# Vietnamese encoding supported by Python.
# From Wikipedia:
# For systems that lack support for Unicode,
# dozens of 8-bit Vietnamese code pages are
# available.[1] The most common are VISCII
# (TCVN 5712:1993), VPS, and Windows-1258.[3]
# Where ASCII is required, such as when
# ensuring readability in plain text e-mail,
# Vietnamese letters are often encoded
# according to Vietnamese Quoted-Readable
# (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4]
# though usage of either variable-width
# scheme has declined dramatically following
# the adoption of Unicode on the World Wide
# Web.
charsets=['WINDOWS-1258'],
alphabet=(u'aăâbcdđeêghiklmnoôơpqrstuưvxy'
u'AĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY'),
wiki_start_pages=[u'Chữ_Quốc_ngữ']),
}
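The mapping above can also be inspected directly once chardet is installed; a
minimal sketch, with attribute names taken from the Language docstring and the
"Russian" key from the LANGUAGES dict above:

    from chardet.metadata.languages import LANGUAGES

    # Pick one Language entry and print the metadata its models are built from.
    russian = LANGUAGES["Russian"]
    print(russian.iso_code, russian.use_ascii, russian.charsets)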

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,6 @@
This software is made available under the terms of *either* of the licenses
found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
under the terms of *both* these licenses.
The code used in the OS random engine is derived from CPython, and is licensed
under the terms of the PSF License Agreement.

View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,27 @@
Copyright (c) Individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of PyCA Cryptography nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,41 @@
1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
the Individual or Organization ("Licensee") accessing and otherwise using Python
2.7.12 software in source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python 2.7.12 alone or in any derivative
version, provided, however, that PSF's License Agreement and PSF's notice of
copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights
Reserved" are retained in Python 2.7.12 alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on or
incorporates Python 2.7.12 or any part thereof, and wants to make the
derivative work available to others as provided herein, then Licensee hereby
agrees to include in any such work a brief summary of the changes made to Python
2.7.12.
4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE
THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material breach of
its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any relationship
of agency, partnership, or joint venture between PSF and Licensee. This License
Agreement does not grant permission to use PSF trademarks or trade name in a
trademark sense to endorse or promote products or services of Licensee, or any
third party.
8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees
to be bound by the terms and conditions of this License Agreement.

View File

@@ -0,0 +1,132 @@
Metadata-Version: 2.1
Name: cryptography
Version: 3.4.7
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Home-page: https://github.com/pyca/cryptography
Author: The Python Cryptographic Authority and individual contributors
Author-email: cryptography-dev@python.org
License: BSD or Apache License, Version 2.0
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Operating System :: POSIX :: BSD
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Security :: Cryptography
Requires-Python: >=3.6
Description-Content-Type: text/x-rst
Requires-Dist: cffi (>=1.12)
Provides-Extra: docs
Requires-Dist: sphinx (!=1.8.0,!=3.1.0,!=3.1.1,>=1.6.5) ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
Provides-Extra: docstest
Requires-Dist: doc8 ; extra == 'docstest'
Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest'
Requires-Dist: twine (>=1.12.0) ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest'
Provides-Extra: pep8test
Requires-Dist: black ; extra == 'pep8test'
Requires-Dist: flake8 ; extra == 'pep8test'
Requires-Dist: flake8-import-order ; extra == 'pep8test'
Requires-Dist: pep8-naming ; extra == 'pep8test'
Provides-Extra: sdist
Requires-Dist: setuptools-rust (>=0.11.4) ; extra == 'sdist'
Provides-Extra: ssh
Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh'
Provides-Extra: test
Requires-Dist: pytest (>=6.0) ; extra == 'test'
Requires-Dist: pytest-cov ; extra == 'test'
Requires-Dist: pytest-subtests ; extra == 'test'
Requires-Dist: pytest-xdist ; extra == 'test'
Requires-Dist: pretend ; extra == 'test'
Requires-Dist: iso8601 ; extra == 'test'
Requires-Dist: pytz ; extra == 'test'
Requires-Dist: hypothesis (!=3.79.2,>=1.11.4) ; extra == 'test'
pyca/cryptography
=================
.. image:: https://img.shields.io/pypi/v/cryptography.svg
:target: https://pypi.org/project/cryptography/
:alt: Latest Version
.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
:target: https://cryptography.io
:alt: Latest Docs
.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=master
:target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amaster
.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master
:target: https://codecov.io/github/pyca/cryptography?branch=master
``cryptography`` is a package which provides cryptographic recipes and
primitives to Python developers. Our goal is for it to be your "cryptographic
standard library". It supports Python 3.6+ and PyPy3 7.2+.
``cryptography`` includes both high level recipes and low level interfaces to
common cryptographic algorithms such as symmetric ciphers, message digests, and
key derivation functions. For example, to encrypt something with
``cryptography``'s high level symmetric encryption recipe:
.. code-block:: pycon
>>> from cryptography.fernet import Fernet
>>> # Put this somewhere safe!
>>> key = Fernet.generate_key()
>>> f = Fernet(key)
>>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
>>> token
b'...'
>>> f.decrypt(token)
b'A really secret message. Not for prying eyes.'
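The low-level interfaces mentioned above live under ``cryptography.hazmat``
(see the ``cryptography/hazmat`` entries in the file listing below); a minimal
sketch of the message digest API, reusing the illustrative message:

.. code-block:: pycon

    >>> from cryptography.hazmat.primitives import hashes
    >>> digest = hashes.Hash(hashes.SHA256())
    >>> digest.update(b"A really secret message. Not for prying eyes.")
    >>> digest.finalize()
    b'...'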
You can find more information in the `documentation`_.
You can install ``cryptography`` with:
.. code-block:: console
    $ pip install cryptography
For full details see `the installation documentation`_.
Discussion
~~~~~~~~~~
If you run into bugs, you can file them in our `issue tracker`_.
We maintain a `cryptography-dev`_ mailing list for development discussion.
You can also join ``#cryptography-dev`` on Freenode to ask questions or get
involved.
Security
~~~~~~~~
Need to report a security issue? Please consult our `security reporting`_
documentation.
.. _`documentation`: https://cryptography.io/
.. _`the installation documentation`: https://cryptography.io/en/latest/installation.html
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
.. _`security reporting`: https://cryptography.io/en/latest/security.html
@ -0,0 +1,188 @@
cryptography-3.4.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-3.4.7.dist-info/LICENSE,sha256=Q9rSzHUqtyHNmp827OcPtTq3cTVR8tPYaU2OjFoG1uI,323
cryptography-3.4.7.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-3.4.7.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography-3.4.7.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415
cryptography-3.4.7.dist-info/METADATA,sha256=2JEXX_QUjtiVtEtIzqJ3f_7TM2bW1EMj8t-Hc3Xlb88,5068
cryptography-3.4.7.dist-info/RECORD,,
cryptography-3.4.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography-3.4.7.dist-info/WHEEL,sha256=JPKSD7xGWsxAONUKQHSxETBJiRc79CNcZN6uTQD0kbc,100
cryptography-3.4.7.dist-info/top_level.txt,sha256=rR2wh6A6juD02TBZNJqqonh8x9UP9Sa5Z9Hl1pCPCiM,31
cryptography/__about__.py,sha256=qu_NKv71xggkNazDGdBaYST79u_xvuqrnNA3Pm2eH9Q,805
cryptography/__init__.py,sha256=qZ9_96xJ8au-AKkdk2Kq60RKN7zGaim_8YY_rAy3_QY,511
cryptography/__pycache__/__about__.cpython-36.pyc,,
cryptography/__pycache__/__init__.cpython-36.pyc,,
cryptography/__pycache__/exceptions.cpython-36.pyc,,
cryptography/__pycache__/fernet.cpython-36.pyc,,
cryptography/__pycache__/utils.cpython-36.pyc,,
cryptography/exceptions.py,sha256=W25jw80RaAL0NOppZt48x1LSmgqaZqAObTtUExWCh3k,1194
cryptography/fernet.py,sha256=Kn_d3z5YFnFP2t9pbX9wpsm7nvlrY7oKO3XLthdstmg,6538
cryptography/hazmat/__init__.py,sha256=OYlvgprzULzZlsf3yYTsd6VUVyQmpsbHjgJdNnsyRwE,418
cryptography/hazmat/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/__pycache__/_der.cpython-36.pyc,,
cryptography/hazmat/__pycache__/_oid.cpython-36.pyc,,
cryptography/hazmat/__pycache__/_types.cpython-36.pyc,,
cryptography/hazmat/_der.py,sha256=1Kf4nwKRUt56KpG3a9Idgn0YFeUcnYecoN60p5oZRcA,5221
cryptography/hazmat/_oid.py,sha256=GVsyziASzIVcnAP_C7dx4czeI_VIccYu9GNV03rWjI0,2372
cryptography/hazmat/_types.py,sha256=TWd5Q_pS_iDOoUdP3MrYbNbPwwM2hSdONh7230eByto,646
cryptography/hazmat/backends/__init__.py,sha256=StVq0WWDbGTx0nsqMxVclREpGYp4j467m-k87xuDQRY,576
cryptography/hazmat/backends/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/backends/__pycache__/interfaces.cpython-36.pyc,,
cryptography/hazmat/backends/interfaces.py,sha256=7_PB6ZpxcRhPSXrZcseOy1u9nQcdb6jXpgf_FDliPQU,10472
cryptography/hazmat/backends/openssl/__init__.py,sha256=7rpz1Z3eV9vZy_d2iLrwC8Oz0vEruDFrjJlc6W2ZDXA,271
cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/dh.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ed25519.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ed448.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ocsp.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/poly1305.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/x25519.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/x448.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-36.pyc,,
cryptography/hazmat/backends/openssl/aead.py,sha256=zt8ZQ-JethHblWEfwAnB5-09JIL9K8qU1NXwPTjeVYA,5700
cryptography/hazmat/backends/openssl/backend.py,sha256=HC-d83ZUru3Z11Q7UnjFuko8Jp-ZEHCjzkpocJEfctM,105287
cryptography/hazmat/backends/openssl/ciphers.py,sha256=fUn5DLrbhI_upLKMvU0aX2_An1dOX8T14PgdZXZr6hU,8611
cryptography/hazmat/backends/openssl/cmac.py,sha256=KXcwF1XlY0Ew6sTBqPj0I1vr62dfMwCjeV3qBosIw8s,2846
cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=9s52X0DBtY4zSM0-nPze7A7nho3aM5nCbRa5T4bCvEU,32254
cryptography/hazmat/backends/openssl/dh.py,sha256=cVPA_PKT4BlT4OvHiJm5ZIDmxNeXBnWy2My4uz8wYpo,10565
cryptography/hazmat/backends/openssl/dsa.py,sha256=eyWzcpZggJuHLD4U3F9-neLyUqIoEN0MAiSwPIcEw2I,10684
cryptography/hazmat/backends/openssl/ec.py,sha256=AOKJntDH0-vRCH_BquHiC8RpkM4ENFv509IX7Myuong,13371
cryptography/hazmat/backends/openssl/ed25519.py,sha256=bSlMfJedRoyzZXoJeaehj_0H_j6Ye5doQHgnib602-Q,5789
cryptography/hazmat/backends/openssl/ed448.py,sha256=dpJf1zt_o8vfVcXYi_PD8d9H-jBbYEp-d6ZIYDKlC1s,5743
cryptography/hazmat/backends/openssl/encode_asn1.py,sha256=aiTahXPWVoG-e_0a8aSlE-OIosoT605P_SKZOpB-mJM,23988
cryptography/hazmat/backends/openssl/hashes.py,sha256=_XZc3glydVD88e0qoHqvOuQ_0xfl2sq0ywfZF4dH91s,3090
cryptography/hazmat/backends/openssl/hmac.py,sha256=ATz-rzSjGiRjL9_I5WJRO3R7QCiujd0izNqYrqPAHsA,2933
cryptography/hazmat/backends/openssl/ocsp.py,sha256=pV4Js2tyOcZPdeeNjFl835COi200yRTt-0PUx9MRGlY,14617
cryptography/hazmat/backends/openssl/poly1305.py,sha256=0hJDAb4pl9dJ_2xgt-XkNfyFA6U_IFXCe5jzOg7gkG0,2327
cryptography/hazmat/backends/openssl/rsa.py,sha256=3GaXjh3j2LwK4idwSHfaqxVMhhDPKftw8CerJDyRLmQ,20919
cryptography/hazmat/backends/openssl/utils.py,sha256=k3i_ARXsPvGTEtUUbnWkg9CkiJgPP4Y0VTTLtOEzEmU,2283
cryptography/hazmat/backends/openssl/x25519.py,sha256=kCnWzuchrJn1Nne4zeotKvlkMty9p3VuM8y1EWo70vQ,4622
cryptography/hazmat/backends/openssl/x448.py,sha256=8OKYMNXDR7UlViU3sNIH5qmLMGP7J-F3OeEaRK0aots,4141
cryptography/hazmat/backends/openssl/x509.py,sha256=mbiJfQrTu_G3jttY_FXRZvqZ8wkjiHcMiPsPlwVHyOg,22831
cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/bindings/_openssl.pyd,sha256=QPVkfC9hg1IK3qEJtrXhd5UGXFuWAeN597glm8qjV2c,3116544
cryptography/hazmat/bindings/_padding.pyd,sha256=-c-Y8RAqzkwvqiYYh60XJgAPf3CHHwuTJAjPUnp8I_M,13824
cryptography/hazmat/bindings/_rust.pyd,sha256=GAtX4QVpMRlZySoJA053H-j25VWkSdC_rR7Fm73R9go,185344
cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-36.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-36.pyc,,
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=2yZw_Ekya_GKKWUMzUbj3yYrLFZQNproXx1N4HL7TbU,8251
cryptography/hazmat/bindings/openssl/binding.py,sha256=mIwnL3fICywOLt-iXZIvw2ijSaOIvdYs1Lwk2FUcxYs,5812
cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/_serialization.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/cmac.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/constant_time.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/hashes.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/hmac.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/keywrap.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/padding.cpython-36.pyc,,
cryptography/hazmat/primitives/__pycache__/poly1305.cpython-36.pyc,,
cryptography/hazmat/primitives/_asymmetric.py,sha256=nVJwmxkakirAXfFp410pC4kY_CinzN5FSJwhEn2IE34,485
cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=sV8-SjhhY4WtHsaLI7e2x4o2cYAAqP8YWBjhC6k1u10,1000
cryptography/hazmat/primitives/_serialization.py,sha256=nl1g48RG17TWhegK8WKlBlXquMae_lmUSzgZnEqdwbU,1307
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=DwsPrun2J00dimo7mq73llEb-O-N4qaOwEx5SwQbleI,909
cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-36.pyc,,
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=dyNhMSOqPNPVuVtvpUNVwPiPHkeqFrKy6lYSPTn4VqI,6303
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=TdeZwnJq8ODqcoreu4jr1LFoFYtxA_z_6mhF8dYc5Yg,8116
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=1e0IpF8SbzrKPbPD4BYTazOaVrVCXMd406x5hzlB3_0,14613
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=Q42f1Cpnlt9UTSfh29T8xcdEgiNaiWr2Wic3sL_eJnk,2719
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=SmBsd5pf3RaJoVxETIAcXC_DB6YGsrJUOrWE1BPx3T0,2630
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=ETdsTtHWSER0ZmTWoCVnWPkG9wvBIxGtal-e6xxl0i4,2115
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=Ekxr0B_O2IUre0kw_oIiLJNtx46ADqC6caypjI6d_0w,12004
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=prIqN-UBc7RfOzFMgM8ON2s3DX8MrXeUlUH1LnmG8gg,1225
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=-nbaGlgT1sufO9Ic-urwKDql8Da0U3GL6hZJIMqHgVc,2588
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=38mR8pqTBFWz5Emv9cQGlqtv_Qg37Bmrla0kRc2HmrU,2549
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=njx_RoatYaxZD0rYhYGi84WQnTZkMSpK67UfWIqkQpE,582
cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-36.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-36.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-36.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-36.pyc,,
cryptography/hazmat/primitives/ciphers/aead.py,sha256=eKzVH2mf-5aFSaBOG9JnJAAd7XBnf9w4BH2Uu2ZT01w,6833
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=EEJCTrUCe8iHN2O1f_bwR2UqhOemhi53-34WsQ6DddI,3829
cryptography/hazmat/primitives/ciphers/base.py,sha256=w8_AWJwX1PrWpvjeB-_RF3iobalR3Hu3HIMDOMr92c8,7164
cryptography/hazmat/primitives/ciphers/modes.py,sha256=mOnOgXyoD0N9NsSOkZvA8qMA3V5O7HubVwYiWVJvRFs,6549
cryptography/hazmat/primitives/cmac.py,sha256=Kkzk8VQHe-_cYeVab24S4ODMWJOZkC4bLWLvCoMWyvQ,2158
cryptography/hazmat/primitives/constant_time.py,sha256=6bkW00QjhKusdgsQbexXhMlGX0XRN59XNmxWS2W38NA,387
cryptography/hazmat/primitives/hashes.py,sha256=cLNJcKKsI8E6ZhENKkppsJ_8S6W97y0tHzXa-ABBhtY,6051
cryptography/hazmat/primitives/hmac.py,sha256=rhrLt6LwlzbIvnqpmOQVT6L_4Xd9xBsUBunPCkHcvWs,2332
cryptography/hazmat/primitives/kdf/__init__.py,sha256=DcZhzfLG8d8IYBH771lGTVU5S87OQDpu3nrfOwZnsmA,715
cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-36.pyc,,
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=F9wepne-IRmhTZ9J4H_XLDI0Rl8LccY6wvhVA0jQ4Tc,4576
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=doR70wjOcA56hxhhQtV2M-ekajjjr5hoT5F8KMxoZdo,3807
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=teuWbRvCZShWiRnv0eg-sXrxm-g7Ss02Ulb3vVbzPvc,5195
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=4HaLcppspYe8od6vur0E408qYgQPjJKtI9kDrWesIdo,2261
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=vCMYGRp-Q--9DxiDQHbkVVRXkhrQTR0qkC0LriV6Hy8,2248
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=N5-2KOA2Z-7kAxjhhU5quNcRpmThyQC5dhU-Cw95jWk,2458
cryptography/hazmat/primitives/keywrap.py,sha256=ibpVZ19OGcoEVrSE7cizdoMDdRDaqcATeVRK5_4MCO4,5927
cryptography/hazmat/primitives/padding.py,sha256=PYlgTNHZUYROnQZ1oeeqKm1WyzkqLlwIpRUgdASHOG8,6193
cryptography/hazmat/primitives/poly1305.py,sha256=_Dtv6oCMn94rAhQ6pjie9mO_MiDLVL5It3Z5sdpCU3c,1711
cryptography/hazmat/primitives/serialization/__init__.py,sha256=RALEthF7wRjlMyTvSq09XmKQey74tsSdDCCsDaD6yQU,1129
cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-36.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-36.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-36.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-36.pyc,,
cryptography/hazmat/primitives/serialization/base.py,sha256=OYqk2UnIR5IAKP1QRNifhoQw-HX3etcWudn3W2JVIyg,1440
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=JuWr5Vqz6zEpjh3j7ME1SCk3TFDNhONjQds_Se7XpFg,2270
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=CsmnGEbtLKm2o6D7h_a-EvHQOfwlHxrV96VkjnrNX7s,5223
cryptography/hazmat/primitives/serialization/ssh.py,sha256=doX0irj_Q1wd1N_JU-Xic_5zUkMH_zZKcQUUOB-axGk,22293
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=ZHo4zwWidFP2RWFl8luiNuYkVMZPghzx54izPNSCtD4,222
cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-36.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-36.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-36.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-36.pyc,,
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=JXph-N0S8CDM-laRoV_G-Welhn7PvcpgXTxRbp_yEjk,2826
cryptography/hazmat/primitives/twofactor/totp.py,sha256=2GTFsdUdA585-N_sqfPhlBBWDY-ExaH1HKH1p3XPWmk,1912
cryptography/hazmat/primitives/twofactor/utils.py,sha256=8TG5oyaz8CxHCXqqh26iAny9w_W1e9SgVdCZaeEzOwU,982
cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography/utils.py,sha256=dyYUz2jr1tTsYQ3SaX3_cBYu720kopdatNy_83L1Mkc,4861
cryptography/x509/__init__.py,sha256=4_Xsv7yVMCGbpIbSgc4SPxDX-3Mn83gN07Us1PAM_eA,7634
cryptography/x509/__pycache__/__init__.cpython-36.pyc,,
cryptography/x509/__pycache__/base.cpython-36.pyc,,
cryptography/x509/__pycache__/certificate_transparency.cpython-36.pyc,,
cryptography/x509/__pycache__/extensions.cpython-36.pyc,,
cryptography/x509/__pycache__/general_name.cpython-36.pyc,,
cryptography/x509/__pycache__/name.cpython-36.pyc,,
cryptography/x509/__pycache__/ocsp.cpython-36.pyc,,
cryptography/x509/__pycache__/oid.cpython-36.pyc,,
cryptography/x509/base.py,sha256=duSe4bIuBiJ5g2NC8-VSxDfqHZ0CEEcXZKhcBGq-eeA,28193
cryptography/x509/certificate_transparency.py,sha256=rzJvxd1FVfc5gOjUT-T2VF5vcOC597UrrI_5JJwZprI,979
cryptography/x509/extensions.py,sha256=M-n_8gEjO5_03ufGHoK_6w8YSSiNyWvHUJ5Kgq5zoN4,54019
cryptography/x509/general_name.py,sha256=5dld2ktZnCEg3l14UyKk6DSlzFHXlc6WxW5J8R8Mk-Q,8161
cryptography/x509/name.py,sha256=PpRua5nWFLZtOg77XdaybGVNspO8ZvQ7ddNDn203vys,8529
cryptography/x509/ocsp.py,sha256=ERB5osTWbNieLj945Xoq0NjBkzqodo_WBL7ORaC2fDg,14738
cryptography/x509/oid.py,sha256=1PxP9Pr_lh77zqyvTJefeRozK3VYaRlNmWfYfDWr2Ak,12619
@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: false
Tag: cp36-abi3-win_amd64
@ -0,0 +1,3 @@
_openssl
_padding
cryptography
@ -0,0 +1,30 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "cryptography"
__summary__ = (
"cryptography is a package which provides cryptographic recipes"
" and primitives to Python developers."
)
__uri__ = "https://github.com/pyca/cryptography"
__version__ = "3.4.7"
__author__ = "The Python Cryptographic Authority and individual contributors"
__email__ = "cryptography-dev@python.org"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2013-2021 {}".format(__author__)
@ -0,0 +1,27 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.__about__ import (
__author__,
__copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
)
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
@ -0,0 +1,57 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from enum import Enum
class _Reasons(Enum):
BACKEND_MISSING_INTERFACE = 0
UNSUPPORTED_HASH = 1
UNSUPPORTED_CIPHER = 2
UNSUPPORTED_PADDING = 3
UNSUPPORTED_MGF = 4
UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
UNSUPPORTED_ELLIPTIC_CURVE = 6
UNSUPPORTED_SERIALIZATION = 7
UNSUPPORTED_X509 = 8
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
UNSUPPORTED_DIFFIE_HELLMAN = 10
UNSUPPORTED_MAC = 11
class UnsupportedAlgorithm(Exception):
def __init__(self, message, reason=None):
super(UnsupportedAlgorithm, self).__init__(message)
self._reason = reason
class AlreadyFinalized(Exception):
pass
class AlreadyUpdated(Exception):
pass
class NotYetFinalized(Exception):
pass
class InvalidTag(Exception):
pass
class InvalidSignature(Exception):
pass
class InternalError(Exception):
def __init__(self, msg, err_code):
super(InternalError, self).__init__(msg)
self.err_code = err_code
class InvalidKey(Exception):
pass
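The classes above are the package-wide exception types. As one hedged illustration of how they surface (mirroring the HMAC-verify pattern used by ``Fernet._verify_signature`` in the vendored ``fernet.py`` below; the all-zero key and the messages are placeholders, not anything from this repository):
.. code-block:: python

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, hmac

    key = b"\x00" * 32  # placeholder key for illustration only

    h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    h.update(b"payload")
    tag = h.finalize()

    # Verifying a different message against the same tag raises InvalidSignature.
    h2 = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    h2.update(b"tampered payload")
    try:
        h2.verify(tag)
    except InvalidSignature:
        print("MAC check failed as expected")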
@ -0,0 +1,204 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import base64
import binascii
import os
import struct
import time
import typing
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import _get_backend
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.hmac import HMAC
class InvalidToken(Exception):
pass
_MAX_CLOCK_SKEW = 60
class Fernet(object):
def __init__(self, key: bytes, backend=None):
backend = _get_backend(backend)
key = base64.urlsafe_b64decode(key)
if len(key) != 32:
raise ValueError(
"Fernet key must be 32 url-safe base64-encoded bytes."
)
self._signing_key = key[:16]
self._encryption_key = key[16:]
self._backend = backend
@classmethod
def generate_key(cls) -> bytes:
return base64.urlsafe_b64encode(os.urandom(32))
def encrypt(self, data: bytes) -> bytes:
return self.encrypt_at_time(data, int(time.time()))
def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
iv = os.urandom(16)
return self._encrypt_from_parts(data, current_time, iv)
def _encrypt_from_parts(
self, data: bytes, current_time: int, iv: bytes
) -> bytes:
utils._check_bytes("data", data)
padder = padding.PKCS7(algorithms.AES.block_size).padder()
padded_data = padder.update(data) + padder.finalize()
encryptor = Cipher(
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
).encryptor()
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
basic_parts = (
b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
)
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
h.update(basic_parts)
hmac = h.finalize()
return base64.urlsafe_b64encode(basic_parts + hmac)
def decrypt(self, token: bytes, ttl: typing.Optional[int] = None) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(token)
if ttl is None:
time_info = None
else:
time_info = (ttl, int(time.time()))
return self._decrypt_data(data, timestamp, time_info)
def decrypt_at_time(
self, token: bytes, ttl: int, current_time: int
) -> bytes:
if ttl is None:
raise ValueError(
"decrypt_at_time() can only be used with a non-None ttl"
)
timestamp, data = Fernet._get_unverified_token_data(token)
return self._decrypt_data(data, timestamp, (ttl, current_time))
def extract_timestamp(self, token: bytes) -> int:
timestamp, data = Fernet._get_unverified_token_data(token)
# Verify the token was not tampered with.
self._verify_signature(data)
return timestamp
@staticmethod
def _get_unverified_token_data(token: bytes) -> typing.Tuple[int, bytes]:
utils._check_bytes("token", token)
try:
data = base64.urlsafe_b64decode(token)
except (TypeError, binascii.Error):
raise InvalidToken
if not data or data[0] != 0x80:
raise InvalidToken
try:
(timestamp,) = struct.unpack(">Q", data[1:9])
except struct.error:
raise InvalidToken
return timestamp, data
def _verify_signature(self, data: bytes) -> None:
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
h.update(data[:-32])
try:
h.verify(data[-32:])
except InvalidSignature:
raise InvalidToken
def _decrypt_data(
self,
data: bytes,
timestamp: int,
time_info: typing.Optional[typing.Tuple[int, int]],
) -> bytes:
if time_info is not None:
ttl, current_time = time_info
if timestamp + ttl < current_time:
raise InvalidToken
if current_time + _MAX_CLOCK_SKEW < timestamp:
raise InvalidToken
self._verify_signature(data)
iv = data[9:25]
ciphertext = data[25:-32]
decryptor = Cipher(
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
).decryptor()
plaintext_padded = decryptor.update(ciphertext)
try:
plaintext_padded += decryptor.finalize()
except ValueError:
raise InvalidToken
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
unpadded = unpadder.update(plaintext_padded)
try:
unpadded += unpadder.finalize()
except ValueError:
raise InvalidToken
return unpadded
class MultiFernet(object):
def __init__(self, fernets: typing.Iterable[Fernet]):
fernets = list(fernets)
if not fernets:
raise ValueError(
"MultiFernet requires at least one Fernet instance"
)
self._fernets = fernets
def encrypt(self, msg: bytes) -> bytes:
return self.encrypt_at_time(msg, int(time.time()))
def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
return self._fernets[0].encrypt_at_time(msg, current_time)
def rotate(self, msg: bytes) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(msg)
for f in self._fernets:
try:
p = f._decrypt_data(data, timestamp, None)
break
except InvalidToken:
pass
else:
raise InvalidToken
iv = os.urandom(16)
return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
def decrypt(self, msg: bytes, ttl: typing.Optional[int] = None) -> bytes:
for f in self._fernets:
try:
return f.decrypt(msg, ttl)
except InvalidToken:
pass
raise InvalidToken
def decrypt_at_time(
self, msg: bytes, ttl: int, current_time: int
) -> bytes:
for f in self._fernets:
try:
return f.decrypt_at_time(msg, ttl, current_time)
except InvalidToken:
pass
raise InvalidToken
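To make the ``MultiFernet`` listing above easier to follow, here is a minimal usage sketch of key rotation; both keys are generated on the spot purely for illustration:
.. code-block:: python

    from cryptography.fernet import Fernet, MultiFernet

    old_key = Fernet.generate_key()
    new_key = Fernet.generate_key()

    token = Fernet(old_key).encrypt(b"secret")

    # Decryption is tried with each key in order; the first key is the one
    # used for new encryptions and for re-encrypting rotated tokens.
    mf = MultiFernet([Fernet(new_key), Fernet(old_key)])
    rotated = mf.rotate(token)  # token is now encrypted under new_key
    assert mf.decrypt(rotated) == b"secret"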
@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
"""
Hazardous Materials
This is a "Hazardous Materials" module. You should ONLY use it if you're
100% absolutely sure that you know what you're doing because this module
is full of land mines, dragons, and dinosaurs with laser guns.
"""
@ -0,0 +1,156 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.utils import int_to_bytes
# This module contains a lightweight DER encoder and decoder. See X.690 for the
# specification. This module intentionally does not implement the more complex
# BER encoding, only DER.
#
# Note this implementation treats an element's constructed bit as part of the
# tag. This is fine for DER, where the bit is always computable from the type.
CONSTRUCTED = 0x20
CONTEXT_SPECIFIC = 0x80
INTEGER = 0x02
BIT_STRING = 0x03
OCTET_STRING = 0x04
NULL = 0x05
OBJECT_IDENTIFIER = 0x06
SEQUENCE = 0x10 | CONSTRUCTED
SET = 0x11 | CONSTRUCTED
PRINTABLE_STRING = 0x13
UTC_TIME = 0x17
GENERALIZED_TIME = 0x18
class DERReader(object):
def __init__(self, data):
self.data = memoryview(data)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_value is None:
self.check_empty()
def is_empty(self):
return len(self.data) == 0
def check_empty(self):
if not self.is_empty():
raise ValueError("Invalid DER input: trailing data")
def read_byte(self) -> int:
if len(self.data) < 1:
raise ValueError("Invalid DER input: insufficient data")
ret = self.data[0]
self.data = self.data[1:]
return ret
def read_bytes(self, n) -> memoryview:
if len(self.data) < n:
raise ValueError("Invalid DER input: insufficient data")
ret = self.data[:n]
self.data = self.data[n:]
return ret
def read_any_element(self) -> typing.Tuple[int, "DERReader"]:
tag = self.read_byte()
# Tag numbers 31 or higher are stored in multiple bytes. No supported
# ASN.1 types use such tags, so reject these.
if tag & 0x1F == 0x1F:
raise ValueError("Invalid DER input: unexpected high tag number")
length_byte = self.read_byte()
if length_byte & 0x80 == 0:
# If the high bit is clear, the first length byte is the length.
length = length_byte
else:
# If the high bit is set, the first length byte encodes the length
# of the length.
length_byte &= 0x7F
if length_byte == 0:
raise ValueError(
"Invalid DER input: indefinite length form is not allowed "
"in DER"
)
length = 0
for i in range(length_byte):
length <<= 8
length |= self.read_byte()
if length == 0:
raise ValueError(
"Invalid DER input: length was not minimally-encoded"
)
if length < 0x80:
# If the length could have been encoded in short form, it must
# not use long form.
raise ValueError(
"Invalid DER input: length was not minimally-encoded"
)
body = self.read_bytes(length)
return tag, DERReader(body)
def read_element(self, expected_tag: int) -> "DERReader":
tag, body = self.read_any_element()
if tag != expected_tag:
raise ValueError("Invalid DER input: unexpected tag")
return body
def read_single_element(self, expected_tag: int) -> "DERReader":
with self:
return self.read_element(expected_tag)
def read_optional_element(
self, expected_tag: int
) -> typing.Optional["DERReader"]:
if len(self.data) > 0 and self.data[0] == expected_tag:
return self.read_element(expected_tag)
return None
def as_integer(self) -> int:
if len(self.data) == 0:
raise ValueError("Invalid DER input: empty integer contents")
first = self.data[0]
if first & 0x80 == 0x80:
raise ValueError("Negative DER integers are not supported")
# The first 9 bits must not all be zero or all be ones. Otherwise, the
# encoding should have been one byte shorter.
if len(self.data) > 1:
second = self.data[1]
if first == 0 and second & 0x80 == 0:
raise ValueError(
"Invalid DER input: integer not minimally-encoded"
)
return int.from_bytes(self.data, "big")
def encode_der_integer(x: int) -> bytes:
if not isinstance(x, int):
raise ValueError("Value must be an integer")
if x < 0:
raise ValueError("Negative integers are not supported")
n = x.bit_length() // 8 + 1
return int_to_bytes(x, n)
def encode_der(tag: int, *children: bytes) -> bytes:
length = 0
for child in children:
length += len(child)
chunks = [bytes([tag])]
if length < 0x80:
chunks.append(bytes([length]))
else:
length_bytes = int_to_bytes(length)
chunks.append(bytes([0x80 | len(length_bytes)]))
chunks.append(length_bytes)
chunks.extend(children)
return b"".join(chunks)
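A minimal round-trip sketch of the DER helpers above (``_der`` is a private, internal module, so this is shown only to illustrate the encoder/decoder, not as public API):
.. code-block:: python

    from cryptography.hazmat._der import (
        INTEGER,
        DERReader,
        encode_der,
        encode_der_integer,
    )

    # Encode the integer 65537 as a DER INTEGER, then parse it back.
    encoded = encode_der(INTEGER, encode_der_integer(65537))
    value = DERReader(encoded).read_single_element(INTEGER).as_integer()
    assert value == 65537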
@ -0,0 +1,76 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography import utils
class ObjectIdentifier(object):
def __init__(self, dotted_string: str):
self._dotted_string = dotted_string
nodes = self._dotted_string.split(".")
intnodes = []
        # There must be at least 2 nodes; the first node must be 0..2, and
        # if the first node is less than 2, the second node must fall in the
        # range 0..39. All nodes must be non-negative integers.
for node in nodes:
try:
node_value = int(node, 10)
except ValueError:
raise ValueError(
"Malformed OID: %s (non-integer nodes)"
% (self._dotted_string)
)
if node_value < 0:
raise ValueError(
"Malformed OID: %s (negative-integer nodes)"
% (self._dotted_string)
)
intnodes.append(node_value)
if len(nodes) < 2:
raise ValueError(
"Malformed OID: %s (insufficient number of nodes)"
% (self._dotted_string)
)
if intnodes[0] > 2:
raise ValueError(
"Malformed OID: %s (first node outside valid range)"
% (self._dotted_string)
)
if intnodes[0] < 2 and intnodes[1] >= 40:
raise ValueError(
"Malformed OID: %s (second node outside valid range)"
% (self._dotted_string)
)
def __eq__(self, other):
if not isinstance(other, ObjectIdentifier):
return NotImplemented
return self.dotted_string == other.dotted_string
def __ne__(self, other):
return not self == other
def __repr__(self):
return "<ObjectIdentifier(oid={}, name={})>".format(
self.dotted_string, self._name
)
def __hash__(self):
return hash(self.dotted_string)
@property
def _name(self):
# Lazy import to avoid an import cycle
from cryptography.x509.oid import _OID_NAMES
return _OID_NAMES.get(self, "Unknown OID")
dotted_string = utils.read_only_property("_dotted_string")
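A short sketch of the validation rules enforced by ``ObjectIdentifier`` above (again a private module, used here only for illustration):
.. code-block:: python

    from cryptography.hazmat._oid import ObjectIdentifier

    oid = ObjectIdentifier("1.2.840.113549")  # well-formed: first node is 0..2
    print(oid.dotted_string)                  # -> 1.2.840.113549

    try:
        ObjectIdentifier("3.1")               # first node outside the 0..2 range
    except ValueError as exc:
        print(exc)                            # Malformed OID: 3.1 (first node outside valid range)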
Some files were not shown because too many files have changed in this diff.